Files
Photovoltaic_Fault_Detector/GPS_Panel/Classifier/Train_Classifier_AllDeep.ipynb
Daniel Saavedra 0688f50313 Classifier SVM
2020-11-25 16:03:59 -03:00

2884 lines
256 KiB
Plaintext

{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"# import the necessary packages\n",
"import matplotlib.pyplot as plt\n",
"from imutils import paths\n",
"import numpy as np\n",
"import argparse\n",
"import imutils\n",
"import pickle\n",
"import cv2\n",
"import os\n",
"from sklearn.preprocessing import LabelEncoder\n",
"from sklearn.svm import SVC\n",
"from imutils.video import VideoStream\n",
"from imutils.video import FPS\n",
"import time\n",
"import tensorflow as tf\n",
"from tensorflow.keras.layers import *\n",
"from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
"from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping\n",
"\n",
"tf.keras.backend.clear_session()"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"\n",
"# training configuration: seeds, hyperparameters and data paths\n",
"# fix seed for reproducible results (only works on CPU, not GPU)\n",
"seed = 10\n",
"np.random.seed(seed=seed)\n",
"tf.random.set_seed(seed=seed)\n",
"\n",
"# hyper parameters for model\n",
"nb_classes = 2 # number of classes\n",
"\n",
"img_width, img_height = 32, 64 # change based on the shape/structure of your images\n",
"batch_size = 4 # try 4, 8, 16, 32, 64, 128, 256 dependent on CPU/GPU memory capacity (powers of 2 values).\n",
"nb_epoch = 500 # number of iterations the algorithm is trained for.\n",
"learn_rate = 1e-4 # sgd learning rate\n",
"momentum = .9 # sgd momentum to avoid local minimum\n",
"rotation_ratio = 10 # how aggressive the data augmentation/transformation will be\n",
"zoom_range = .05\n",
"width_shift_range=0.05\n",
"height_shift_range=0.05\n",
"shear_range=0.05\n",
"fill_mode='nearest'\n",
"\n",
"patience = int(nb_epoch/3)\n",
"\n",
"train_data_dir = './Data_set_2/Data_prueba_0/' # Inside, each class should have its own folder\n",
"#validation_data_dir = './dataset/' # each class should have its own folder\n",
"model_path = './model_2/'"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Model: \"model\"\n",
"__________________________________________________________________________________________________\n",
"Layer (type) Output Shape Param # Connected to \n",
"==================================================================================================\n",
"input_1 (InputLayer) [(None, 32, 64, 1)] 0 \n",
"__________________________________________________________________________________________________\n",
"stem_conv1 (Conv2D) (None, 15, 31, 32) 288 input_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"stem_bn1 (BatchNormalization) (None, 15, 31, 32) 128 stem_conv1[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation (Activation) (None, 15, 31, 32) 0 stem_bn1[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_conv_1_stem_1 (Conv2D (None, 15, 31, 11) 352 activation[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_bn_1_stem_1 (BatchNor (None, 15, 31, 11) 44 reduction_conv_1_stem_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_1 (Activation) (None, 15, 31, 11) 0 reduction_bn_1_stem_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_3 (Activation) (None, 15, 31, 32) 0 stem_bn1[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_pad_reduction_ (None, 19, 35, 11) 0 activation_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_pad_reduction_ (None, 21, 37, 32) 0 activation_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_left (None, 8, 16, 11) 396 separable_conv_1_pad_reduction_le\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_righ (None, 8, 16, 11) 1920 separable_conv_1_pad_reduction_ri\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_l (None, 8, 16, 11) 44 separable_conv_1_reduction_left1_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_r (None, 8, 16, 11) 44 separable_conv_1_reduction_right1\n",
"__________________________________________________________________________________________________\n",
"activation_2 (Activation) (None, 8, 16, 11) 0 separable_conv_1_bn_reduction_lef\n",
"__________________________________________________________________________________________________\n",
"activation_4 (Activation) (None, 8, 16, 11) 0 separable_conv_1_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_left (None, 8, 16, 11) 396 activation_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_righ (None, 8, 16, 11) 660 activation_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_5 (Activation) (None, 15, 31, 32) 0 stem_bn1[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_l (None, 8, 16, 11) 44 separable_conv_2_reduction_left1_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_r (None, 8, 16, 11) 44 separable_conv_2_reduction_right1\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_pad_reduction_ (None, 21, 37, 32) 0 activation_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_7 (Activation) (None, 15, 31, 32) 0 stem_bn1[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_add_1_stem_1 (Add) (None, 8, 16, 11) 0 separable_conv_2_bn_reduction_lef\n",
" separable_conv_2_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_righ (None, 8, 16, 11) 1920 separable_conv_1_pad_reduction_ri\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_pad_reduction_ (None, 19, 35, 32) 0 activation_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_9 (Activation) (None, 8, 16, 11) 0 reduction_add_1_stem_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_r (None, 8, 16, 11) 44 separable_conv_1_reduction_right2\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_righ (None, 8, 16, 11) 1152 separable_conv_1_pad_reduction_ri\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_left (None, 8, 16, 11) 220 activation_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_6 (Activation) (None, 8, 16, 11) 0 separable_conv_1_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_r (None, 8, 16, 11) 44 separable_conv_1_reduction_right3\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_l (None, 8, 16, 11) 44 separable_conv_1_reduction_left4_\n",
"__________________________________________________________________________________________________\n",
"reduction_pad_1_stem_1 (ZeroPad (None, 17, 33, 11) 0 reduction_bn_1_stem_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_righ (None, 8, 16, 11) 660 activation_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_8 (Activation) (None, 8, 16, 11) 0 separable_conv_1_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"activation_10 (Activation) (None, 8, 16, 11) 0 separable_conv_1_bn_reduction_lef\n",
"__________________________________________________________________________________________________\n",
"reduction_left2_stem_1 (MaxPool (None, 8, 16, 11) 0 reduction_pad_1_stem_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_r (None, 8, 16, 11) 44 separable_conv_2_reduction_right2\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_righ (None, 8, 16, 11) 396 activation_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_left (None, 8, 16, 11) 220 activation_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_relu_1_stem_2 (Activatio (None, 15, 31, 32) 0 stem_bn1[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_add_2_stem_1 (Add) (None, 8, 16, 11) 0 reduction_left2_stem_1[0][0] \n",
" separable_conv_2_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"reduction_left3_stem_1 (Average (None, 8, 16, 11) 0 reduction_pad_1_stem_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_r (None, 8, 16, 11) 44 separable_conv_2_reduction_right3\n",
"__________________________________________________________________________________________________\n",
"reduction_left4_stem_1 (Average (None, 8, 16, 11) 0 reduction_add_1_stem_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_l (None, 8, 16, 11) 44 separable_conv_2_reduction_left4_\n",
"__________________________________________________________________________________________________\n",
"reduction_right5_stem_1 (MaxPoo (None, 8, 16, 11) 0 reduction_pad_1_stem_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"zero_padding2d (ZeroPadding2D) (None, 16, 32, 32) 0 adjust_relu_1_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_add3_stem_1 (Add) (None, 8, 16, 11) 0 reduction_left3_stem_1[0][0] \n",
" separable_conv_2_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"add (Add) (None, 8, 16, 11) 0 reduction_add_2_stem_1[0][0] \n",
" reduction_left4_stem_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_add4_stem_1 (Add) (None, 8, 16, 11) 0 separable_conv_2_bn_reduction_lef\n",
" reduction_right5_stem_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"cropping2d (Cropping2D) (None, 15, 31, 32) 0 zero_padding2d[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_concat_stem_1 (Concat (None, 8, 16, 44) 0 reduction_add_2_stem_1[0][0] \n",
" reduction_add3_stem_1[0][0] \n",
" add[0][0] \n",
" reduction_add4_stem_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_avg_pool_1_stem_2 (Avera (None, 8, 16, 32) 0 adjust_relu_1_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_avg_pool_2_stem_2 (Avera (None, 8, 16, 32) 0 cropping2d[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_11 (Activation) (None, 8, 16, 44) 0 reduction_concat_stem_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_1_stem_2 (Conv2D) (None, 8, 16, 11) 352 adjust_avg_pool_1_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_2_stem_2 (Conv2D) (None, 8, 16, 11) 352 adjust_avg_pool_2_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_conv_1_stem_2 (Conv2D (None, 8, 16, 22) 968 activation_11[0][0] \n",
"__________________________________________________________________________________________________\n",
"concatenate (Concatenate) (None, 8, 16, 22) 0 adjust_conv_1_stem_2[0][0] \n",
" adjust_conv_2_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_bn_1_stem_2 (BatchNor (None, 8, 16, 22) 88 reduction_conv_1_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_bn_stem_2 (BatchNormaliz (None, 8, 16, 22) 88 concatenate[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_12 (Activation) (None, 8, 16, 22) 0 reduction_bn_1_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_14 (Activation) (None, 8, 16, 22) 0 adjust_bn_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_pad_reduction_ (None, 11, 19, 22) 0 activation_12[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_pad_reduction_ (None, 13, 21, 22) 0 activation_14[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_left (None, 4, 8, 22) 1034 separable_conv_1_pad_reduction_le\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_righ (None, 4, 8, 22) 1562 separable_conv_1_pad_reduction_ri\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_l (None, 4, 8, 22) 88 separable_conv_1_reduction_left1_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_r (None, 4, 8, 22) 88 separable_conv_1_reduction_right1\n",
"__________________________________________________________________________________________________\n",
"activation_13 (Activation) (None, 4, 8, 22) 0 separable_conv_1_bn_reduction_lef\n",
"__________________________________________________________________________________________________\n",
"activation_15 (Activation) (None, 4, 8, 22) 0 separable_conv_1_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_left (None, 4, 8, 22) 1034 activation_13[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_righ (None, 4, 8, 22) 1562 activation_15[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_16 (Activation) (None, 8, 16, 22) 0 adjust_bn_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_l (None, 4, 8, 22) 88 separable_conv_2_reduction_left1_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_r (None, 4, 8, 22) 88 separable_conv_2_reduction_right1\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_pad_reduction_ (None, 13, 21, 22) 0 activation_16[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_18 (Activation) (None, 8, 16, 22) 0 adjust_bn_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_add_1_stem_2 (Add) (None, 4, 8, 22) 0 separable_conv_2_bn_reduction_lef\n",
" separable_conv_2_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_righ (None, 4, 8, 22) 1562 separable_conv_1_pad_reduction_ri\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_pad_reduction_ (None, 11, 19, 22) 0 activation_18[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_20 (Activation) (None, 4, 8, 22) 0 reduction_add_1_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_r (None, 4, 8, 22) 88 separable_conv_1_reduction_right2\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_righ (None, 4, 8, 22) 1034 separable_conv_1_pad_reduction_ri\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_left (None, 4, 8, 22) 682 activation_20[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_17 (Activation) (None, 4, 8, 22) 0 separable_conv_1_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_r (None, 4, 8, 22) 88 separable_conv_1_reduction_right3\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_l (None, 4, 8, 22) 88 separable_conv_1_reduction_left4_\n",
"__________________________________________________________________________________________________\n",
"reduction_pad_1_stem_2 (ZeroPad (None, 9, 17, 22) 0 reduction_bn_1_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_righ (None, 4, 8, 22) 1562 activation_17[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_19 (Activation) (None, 4, 8, 22) 0 separable_conv_1_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"activation_21 (Activation) (None, 4, 8, 22) 0 separable_conv_1_bn_reduction_lef\n",
"__________________________________________________________________________________________________\n",
"reduction_left2_stem_2 (MaxPool (None, 4, 8, 22) 0 reduction_pad_1_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_r (None, 4, 8, 22) 88 separable_conv_2_reduction_right2\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_righ (None, 4, 8, 22) 1034 activation_19[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_left (None, 4, 8, 22) 682 activation_21[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_relu_1_0 (Activation) (None, 8, 16, 44) 0 reduction_concat_stem_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_add_2_stem_2 (Add) (None, 4, 8, 22) 0 reduction_left2_stem_2[0][0] \n",
" separable_conv_2_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"reduction_left3_stem_2 (Average (None, 4, 8, 22) 0 reduction_pad_1_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_r (None, 4, 8, 22) 88 separable_conv_2_reduction_right3\n",
"__________________________________________________________________________________________________\n",
"reduction_left4_stem_2 (Average (None, 4, 8, 22) 0 reduction_add_1_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_l (None, 4, 8, 22) 88 separable_conv_2_reduction_left4_\n",
"__________________________________________________________________________________________________\n",
"reduction_right5_stem_2 (MaxPoo (None, 4, 8, 22) 0 reduction_pad_1_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"zero_padding2d_1 (ZeroPadding2D (None, 9, 17, 44) 0 adjust_relu_1_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_add3_stem_2 (Add) (None, 4, 8, 22) 0 reduction_left3_stem_2[0][0] \n",
" separable_conv_2_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"add_1 (Add) (None, 4, 8, 22) 0 reduction_add_2_stem_2[0][0] \n",
" reduction_left4_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_add4_stem_2 (Add) (None, 4, 8, 22) 0 separable_conv_2_bn_reduction_lef\n",
" reduction_right5_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"cropping2d_1 (Cropping2D) (None, 8, 16, 44) 0 zero_padding2d_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_concat_stem_2 (Concat (None, 4, 8, 88) 0 reduction_add_2_stem_2[0][0] \n",
" reduction_add3_stem_2[0][0] \n",
" add_1[0][0] \n",
" reduction_add4_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_avg_pool_1_0 (AveragePoo (None, 4, 8, 44) 0 adjust_relu_1_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_avg_pool_2_0 (AveragePoo (None, 4, 8, 44) 0 cropping2d_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_1_0 (Conv2D) (None, 4, 8, 22) 968 adjust_avg_pool_1_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_2_0 (Conv2D) (None, 4, 8, 22) 968 adjust_avg_pool_2_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_22 (Activation) (None, 4, 8, 88) 0 reduction_concat_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"concatenate_1 (Concatenate) (None, 4, 8, 44) 0 adjust_conv_1_0[0][0] \n",
" adjust_conv_2_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_conv_1_0 (Conv2D) (None, 4, 8, 44) 3872 activation_22[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_bn_0 (BatchNormalization (None, 4, 8, 44) 176 concatenate_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_bn_1_0 (BatchNormalizati (None, 4, 8, 44) 176 normal_conv_1_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_23 (Activation) (None, 4, 8, 44) 0 normal_bn_1_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_25 (Activation) (None, 4, 8, 44) 0 adjust_bn_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_27 (Activation) (None, 4, 8, 44) 0 adjust_bn_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_29 (Activation) (None, 4, 8, 44) 0 adjust_bn_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_31 (Activation) (None, 4, 8, 44) 0 normal_bn_1_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left1_0 (None, 4, 8, 44) 3036 activation_23[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right1_ (None, 4, 8, 44) 2332 activation_25[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left2_0 (None, 4, 8, 44) 3036 activation_27[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right2_ (None, 4, 8, 44) 2332 activation_29[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left5_0 (None, 4, 8, 44) 2332 activation_31[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 4, 8, 44) 176 separable_conv_1_normal_left1_0[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 4, 8, 44) 176 separable_conv_1_normal_right1_0[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 4, 8, 44) 176 separable_conv_1_normal_left2_0[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 4, 8, 44) 176 separable_conv_1_normal_right2_0[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 4, 8, 44) 176 separable_conv_1_normal_left5_0[0\n",
"__________________________________________________________________________________________________\n",
"activation_24 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_left1_\n",
"__________________________________________________________________________________________________\n",
"activation_26 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"activation_28 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_left2_\n",
"__________________________________________________________________________________________________\n",
"activation_30 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"activation_32 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_left5_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left1_0 (None, 4, 8, 44) 3036 activation_24[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right1_ (None, 4, 8, 44) 2332 activation_26[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left2_0 (None, 4, 8, 44) 3036 activation_28[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right2_ (None, 4, 8, 44) 2332 activation_30[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left5_0 (None, 4, 8, 44) 2332 activation_32[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 4, 8, 44) 176 separable_conv_2_normal_left1_0[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 4, 8, 44) 176 separable_conv_2_normal_right1_0[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 4, 8, 44) 176 separable_conv_2_normal_left2_0[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 4, 8, 44) 176 separable_conv_2_normal_right2_0[\n",
"__________________________________________________________________________________________________\n",
"normal_left3_0 (AveragePooling2 (None, 4, 8, 44) 0 normal_bn_1_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_left4_0 (AveragePooling2 (None, 4, 8, 44) 0 adjust_bn_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_right4_0 (AveragePooling (None, 4, 8, 44) 0 adjust_bn_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 4, 8, 44) 176 separable_conv_2_normal_left5_0[0\n",
"__________________________________________________________________________________________________\n",
"normal_add_1_0 (Add) (None, 4, 8, 44) 0 separable_conv_2_bn_normal_left1_\n",
" separable_conv_2_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"normal_add_2_0 (Add) (None, 4, 8, 44) 0 separable_conv_2_bn_normal_left2_\n",
" separable_conv_2_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"normal_add_3_0 (Add) (None, 4, 8, 44) 0 normal_left3_0[0][0] \n",
" adjust_bn_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_4_0 (Add) (None, 4, 8, 44) 0 normal_left4_0[0][0] \n",
" normal_right4_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_5_0 (Add) (None, 4, 8, 44) 0 separable_conv_2_bn_normal_left5_\n",
" normal_bn_1_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_concat_0 (Concatenate) (None, 4, 8, 264) 0 adjust_bn_0[0][0] \n",
" normal_add_1_0[0][0] \n",
" normal_add_2_0[0][0] \n",
" normal_add_3_0[0][0] \n",
" normal_add_4_0[0][0] \n",
" normal_add_5_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_33 (Activation) (None, 4, 8, 88) 0 reduction_concat_stem_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_34 (Activation) (None, 4, 8, 264) 0 normal_concat_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_projection_1 (Conv2 (None, 4, 8, 44) 3872 activation_33[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_conv_1_1 (Conv2D) (None, 4, 8, 44) 11616 activation_34[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_bn_1 (BatchNormalization (None, 4, 8, 44) 176 adjust_conv_projection_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_bn_1_1 (BatchNormalizati (None, 4, 8, 44) 176 normal_conv_1_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_35 (Activation) (None, 4, 8, 44) 0 normal_bn_1_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_37 (Activation) (None, 4, 8, 44) 0 adjust_bn_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_39 (Activation) (None, 4, 8, 44) 0 adjust_bn_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_41 (Activation) (None, 4, 8, 44) 0 adjust_bn_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_43 (Activation) (None, 4, 8, 44) 0 normal_bn_1_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left1_1 (None, 4, 8, 44) 3036 activation_35[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right1_ (None, 4, 8, 44) 2332 activation_37[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left2_1 (None, 4, 8, 44) 3036 activation_39[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right2_ (None, 4, 8, 44) 2332 activation_41[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left5_1 (None, 4, 8, 44) 2332 activation_43[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 4, 8, 44) 176 separable_conv_1_normal_left1_1[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 4, 8, 44) 176 separable_conv_1_normal_right1_1[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 4, 8, 44) 176 separable_conv_1_normal_left2_1[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 4, 8, 44) 176 separable_conv_1_normal_right2_1[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 4, 8, 44) 176 separable_conv_1_normal_left5_1[0\n",
"__________________________________________________________________________________________________\n",
"activation_36 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_left1_\n",
"__________________________________________________________________________________________________\n",
"activation_38 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"activation_40 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_left2_\n",
"__________________________________________________________________________________________________\n",
"activation_42 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"activation_44 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_left5_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left1_1 (None, 4, 8, 44) 3036 activation_36[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right1_ (None, 4, 8, 44) 2332 activation_38[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left2_1 (None, 4, 8, 44) 3036 activation_40[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right2_ (None, 4, 8, 44) 2332 activation_42[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left5_1 (None, 4, 8, 44) 2332 activation_44[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 4, 8, 44) 176 separable_conv_2_normal_left1_1[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 4, 8, 44) 176 separable_conv_2_normal_right1_1[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 4, 8, 44) 176 separable_conv_2_normal_left2_1[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 4, 8, 44) 176 separable_conv_2_normal_right2_1[\n",
"__________________________________________________________________________________________________\n",
"normal_left3_1 (AveragePooling2 (None, 4, 8, 44) 0 normal_bn_1_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_left4_1 (AveragePooling2 (None, 4, 8, 44) 0 adjust_bn_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_right4_1 (AveragePooling (None, 4, 8, 44) 0 adjust_bn_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 4, 8, 44) 176 separable_conv_2_normal_left5_1[0\n",
"__________________________________________________________________________________________________\n",
"normal_add_1_1 (Add) (None, 4, 8, 44) 0 separable_conv_2_bn_normal_left1_\n",
" separable_conv_2_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"normal_add_2_1 (Add) (None, 4, 8, 44) 0 separable_conv_2_bn_normal_left2_\n",
" separable_conv_2_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"normal_add_3_1 (Add) (None, 4, 8, 44) 0 normal_left3_1[0][0] \n",
" adjust_bn_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_4_1 (Add) (None, 4, 8, 44) 0 normal_left4_1[0][0] \n",
" normal_right4_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_5_1 (Add) (None, 4, 8, 44) 0 separable_conv_2_bn_normal_left5_\n",
" normal_bn_1_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_concat_1 (Concatenate) (None, 4, 8, 264) 0 adjust_bn_1[0][0] \n",
" normal_add_1_1[0][0] \n",
" normal_add_2_1[0][0] \n",
" normal_add_3_1[0][0] \n",
" normal_add_4_1[0][0] \n",
" normal_add_5_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_45 (Activation) (None, 4, 8, 264) 0 normal_concat_0[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_46 (Activation) (None, 4, 8, 264) 0 normal_concat_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_projection_2 (Conv2 (None, 4, 8, 44) 11616 activation_45[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_conv_1_2 (Conv2D) (None, 4, 8, 44) 11616 activation_46[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_bn_2 (BatchNormalization (None, 4, 8, 44) 176 adjust_conv_projection_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_bn_1_2 (BatchNormalizati (None, 4, 8, 44) 176 normal_conv_1_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_47 (Activation) (None, 4, 8, 44) 0 normal_bn_1_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_49 (Activation) (None, 4, 8, 44) 0 adjust_bn_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_51 (Activation) (None, 4, 8, 44) 0 adjust_bn_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_53 (Activation) (None, 4, 8, 44) 0 adjust_bn_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_55 (Activation) (None, 4, 8, 44) 0 normal_bn_1_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left1_2 (None, 4, 8, 44) 3036 activation_47[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right1_ (None, 4, 8, 44) 2332 activation_49[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left2_2 (None, 4, 8, 44) 3036 activation_51[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right2_ (None, 4, 8, 44) 2332 activation_53[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left5_2 (None, 4, 8, 44) 2332 activation_55[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 4, 8, 44) 176 separable_conv_1_normal_left1_2[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 4, 8, 44) 176 separable_conv_1_normal_right1_2[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 4, 8, 44) 176 separable_conv_1_normal_left2_2[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 4, 8, 44) 176 separable_conv_1_normal_right2_2[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 4, 8, 44) 176 separable_conv_1_normal_left5_2[0\n",
"__________________________________________________________________________________________________\n",
"activation_48 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_left1_\n",
"__________________________________________________________________________________________________\n",
"activation_50 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"activation_52 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_left2_\n",
"__________________________________________________________________________________________________\n",
"activation_54 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"activation_56 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_left5_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left1_2 (None, 4, 8, 44) 3036 activation_48[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right1_ (None, 4, 8, 44) 2332 activation_50[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left2_2 (None, 4, 8, 44) 3036 activation_52[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right2_ (None, 4, 8, 44) 2332 activation_54[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left5_2 (None, 4, 8, 44) 2332 activation_56[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 4, 8, 44) 176 separable_conv_2_normal_left1_2[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 4, 8, 44) 176 separable_conv_2_normal_right1_2[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 4, 8, 44) 176 separable_conv_2_normal_left2_2[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 4, 8, 44) 176 separable_conv_2_normal_right2_2[\n",
"__________________________________________________________________________________________________\n",
"normal_left3_2 (AveragePooling2 (None, 4, 8, 44) 0 normal_bn_1_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_left4_2 (AveragePooling2 (None, 4, 8, 44) 0 adjust_bn_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_right4_2 (AveragePooling (None, 4, 8, 44) 0 adjust_bn_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 4, 8, 44) 176 separable_conv_2_normal_left5_2[0\n",
"__________________________________________________________________________________________________\n",
"normal_add_1_2 (Add) (None, 4, 8, 44) 0 separable_conv_2_bn_normal_left1_\n",
" separable_conv_2_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"normal_add_2_2 (Add) (None, 4, 8, 44) 0 separable_conv_2_bn_normal_left2_\n",
" separable_conv_2_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"normal_add_3_2 (Add) (None, 4, 8, 44) 0 normal_left3_2[0][0] \n",
" adjust_bn_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_4_2 (Add) (None, 4, 8, 44) 0 normal_left4_2[0][0] \n",
" normal_right4_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_5_2 (Add) (None, 4, 8, 44) 0 separable_conv_2_bn_normal_left5_\n",
" normal_bn_1_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_concat_2 (Concatenate) (None, 4, 8, 264) 0 adjust_bn_2[0][0] \n",
" normal_add_1_2[0][0] \n",
" normal_add_2_2[0][0] \n",
" normal_add_3_2[0][0] \n",
" normal_add_4_2[0][0] \n",
" normal_add_5_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_57 (Activation) (None, 4, 8, 264) 0 normal_concat_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_58 (Activation) (None, 4, 8, 264) 0 normal_concat_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_projection_3 (Conv2 (None, 4, 8, 44) 11616 activation_57[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_conv_1_3 (Conv2D) (None, 4, 8, 44) 11616 activation_58[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_bn_3 (BatchNormalization (None, 4, 8, 44) 176 adjust_conv_projection_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_bn_1_3 (BatchNormalizati (None, 4, 8, 44) 176 normal_conv_1_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_59 (Activation) (None, 4, 8, 44) 0 normal_bn_1_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_61 (Activation) (None, 4, 8, 44) 0 adjust_bn_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_63 (Activation) (None, 4, 8, 44) 0 adjust_bn_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_65 (Activation) (None, 4, 8, 44) 0 adjust_bn_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_67 (Activation) (None, 4, 8, 44) 0 normal_bn_1_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left1_3 (None, 4, 8, 44) 3036 activation_59[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right1_ (None, 4, 8, 44) 2332 activation_61[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left2_3 (None, 4, 8, 44) 3036 activation_63[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right2_ (None, 4, 8, 44) 2332 activation_65[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left5_3 (None, 4, 8, 44) 2332 activation_67[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 4, 8, 44) 176 separable_conv_1_normal_left1_3[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 4, 8, 44) 176 separable_conv_1_normal_right1_3[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 4, 8, 44) 176 separable_conv_1_normal_left2_3[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 4, 8, 44) 176 separable_conv_1_normal_right2_3[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 4, 8, 44) 176 separable_conv_1_normal_left5_3[0\n",
"__________________________________________________________________________________________________\n",
"activation_60 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_left1_\n",
"__________________________________________________________________________________________________\n",
"activation_62 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"activation_64 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_left2_\n",
"__________________________________________________________________________________________________\n",
"activation_66 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"activation_68 (Activation) (None, 4, 8, 44) 0 separable_conv_1_bn_normal_left5_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left1_3 (None, 4, 8, 44) 3036 activation_60[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right1_ (None, 4, 8, 44) 2332 activation_62[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left2_3 (None, 4, 8, 44) 3036 activation_64[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right2_ (None, 4, 8, 44) 2332 activation_66[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left5_3 (None, 4, 8, 44) 2332 activation_68[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 4, 8, 44) 176 separable_conv_2_normal_left1_3[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 4, 8, 44) 176 separable_conv_2_normal_right1_3[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 4, 8, 44) 176 separable_conv_2_normal_left2_3[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 4, 8, 44) 176 separable_conv_2_normal_right2_3[\n",
"__________________________________________________________________________________________________\n",
"normal_left3_3 (AveragePooling2 (None, 4, 8, 44) 0 normal_bn_1_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_left4_3 (AveragePooling2 (None, 4, 8, 44) 0 adjust_bn_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_right4_3 (AveragePooling (None, 4, 8, 44) 0 adjust_bn_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 4, 8, 44) 176 separable_conv_2_normal_left5_3[0\n",
"__________________________________________________________________________________________________\n",
"normal_add_1_3 (Add) (None, 4, 8, 44) 0 separable_conv_2_bn_normal_left1_\n",
" separable_conv_2_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"normal_add_2_3 (Add) (None, 4, 8, 44) 0 separable_conv_2_bn_normal_left2_\n",
" separable_conv_2_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"normal_add_3_3 (Add) (None, 4, 8, 44) 0 normal_left3_3[0][0] \n",
" adjust_bn_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_4_3 (Add) (None, 4, 8, 44) 0 normal_left4_3[0][0] \n",
" normal_right4_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_5_3 (Add) (None, 4, 8, 44) 0 separable_conv_2_bn_normal_left5_\n",
" normal_bn_1_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_concat_3 (Concatenate) (None, 4, 8, 264) 0 adjust_bn_3[0][0] \n",
" normal_add_1_3[0][0] \n",
" normal_add_2_3[0][0] \n",
" normal_add_3_3[0][0] \n",
" normal_add_4_3[0][0] \n",
" normal_add_5_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_70 (Activation) (None, 4, 8, 264) 0 normal_concat_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_69 (Activation) (None, 4, 8, 264) 0 normal_concat_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_conv_1_reduce_4 (Conv (None, 4, 8, 88) 23232 activation_70[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_projection_reduce_4 (None, 4, 8, 88) 23232 activation_69[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_bn_1_reduce_4 (BatchN (None, 4, 8, 88) 352 reduction_conv_1_reduce_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_bn_reduce_4 (BatchNormal (None, 4, 8, 88) 352 adjust_conv_projection_reduce_4[0\n",
"__________________________________________________________________________________________________\n",
"activation_71 (Activation) (None, 4, 8, 88) 0 reduction_bn_1_reduce_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_73 (Activation) (None, 4, 8, 88) 0 adjust_bn_reduce_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_pad_reduction_ (None, 7, 11, 88) 0 activation_71[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_pad_reduction_ (None, 9, 13, 88) 0 activation_73[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_left (None, 2, 4, 88) 9944 separable_conv_1_pad_reduction_le\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_righ (None, 2, 4, 88) 12056 separable_conv_1_pad_reduction_ri\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_l (None, 2, 4, 88) 352 separable_conv_1_reduction_left1_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_r (None, 2, 4, 88) 352 separable_conv_1_reduction_right1\n",
"__________________________________________________________________________________________________\n",
"activation_72 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_reduction_lef\n",
"__________________________________________________________________________________________________\n",
"activation_74 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_left (None, 2, 4, 88) 9944 activation_72[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_righ (None, 2, 4, 88) 12056 activation_74[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_75 (Activation) (None, 4, 8, 88) 0 adjust_bn_reduce_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_l (None, 2, 4, 88) 352 separable_conv_2_reduction_left1_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_r (None, 2, 4, 88) 352 separable_conv_2_reduction_right1\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_pad_reduction_ (None, 9, 13, 88) 0 activation_75[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_77 (Activation) (None, 4, 8, 88) 0 adjust_bn_reduce_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_add_1_reduce_4 (Add) (None, 2, 4, 88) 0 separable_conv_2_bn_reduction_lef\n",
" separable_conv_2_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_righ (None, 2, 4, 88) 12056 separable_conv_1_pad_reduction_ri\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_pad_reduction_ (None, 7, 11, 88) 0 activation_77[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_79 (Activation) (None, 2, 4, 88) 0 reduction_add_1_reduce_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_r (None, 2, 4, 88) 352 separable_conv_1_reduction_right2\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_righ (None, 2, 4, 88) 9944 separable_conv_1_pad_reduction_ri\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_left (None, 2, 4, 88) 8536 activation_79[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_76 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_r (None, 2, 4, 88) 352 separable_conv_1_reduction_right3\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_l (None, 2, 4, 88) 352 separable_conv_1_reduction_left4_\n",
"__________________________________________________________________________________________________\n",
"reduction_pad_1_reduce_4 (ZeroP (None, 5, 9, 88) 0 reduction_bn_1_reduce_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_righ (None, 2, 4, 88) 12056 activation_76[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_78 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"activation_80 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_reduction_lef\n",
"__________________________________________________________________________________________________\n",
"reduction_left2_reduce_4 (MaxPo (None, 2, 4, 88) 0 reduction_pad_1_reduce_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_r (None, 2, 4, 88) 352 separable_conv_2_reduction_right2\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_righ (None, 2, 4, 88) 9944 activation_78[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_left (None, 2, 4, 88) 8536 activation_80[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_relu_1_5 (Activation) (None, 4, 8, 264) 0 normal_concat_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_add_2_reduce_4 (Add) (None, 2, 4, 88) 0 reduction_left2_reduce_4[0][0] \n",
" separable_conv_2_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"reduction_left3_reduce_4 (Avera (None, 2, 4, 88) 0 reduction_pad_1_reduce_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_r (None, 2, 4, 88) 352 separable_conv_2_reduction_right3\n",
"__________________________________________________________________________________________________\n",
"reduction_left4_reduce_4 (Avera (None, 2, 4, 88) 0 reduction_add_1_reduce_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_l (None, 2, 4, 88) 352 separable_conv_2_reduction_left4_\n",
"__________________________________________________________________________________________________\n",
"reduction_right5_reduce_4 (MaxP (None, 2, 4, 88) 0 reduction_pad_1_reduce_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"zero_padding2d_2 (ZeroPadding2D (None, 5, 9, 264) 0 adjust_relu_1_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_add3_reduce_4 (Add) (None, 2, 4, 88) 0 reduction_left3_reduce_4[0][0] \n",
" separable_conv_2_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"add_2 (Add) (None, 2, 4, 88) 0 reduction_add_2_reduce_4[0][0] \n",
" reduction_left4_reduce_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_add4_reduce_4 (Add) (None, 2, 4, 88) 0 separable_conv_2_bn_reduction_lef\n",
" reduction_right5_reduce_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"cropping2d_2 (Cropping2D) (None, 4, 8, 264) 0 zero_padding2d_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_concat_reduce_4 (Conc (None, 2, 4, 352) 0 reduction_add_2_reduce_4[0][0] \n",
" reduction_add3_reduce_4[0][0] \n",
" add_2[0][0] \n",
" reduction_add4_reduce_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_avg_pool_1_5 (AveragePoo (None, 2, 4, 264) 0 adjust_relu_1_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_avg_pool_2_5 (AveragePoo (None, 2, 4, 264) 0 cropping2d_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_1_5 (Conv2D) (None, 2, 4, 44) 11616 adjust_avg_pool_1_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_2_5 (Conv2D) (None, 2, 4, 44) 11616 adjust_avg_pool_2_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_81 (Activation) (None, 2, 4, 352) 0 reduction_concat_reduce_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"concatenate_2 (Concatenate) (None, 2, 4, 88) 0 adjust_conv_1_5[0][0] \n",
" adjust_conv_2_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_conv_1_5 (Conv2D) (None, 2, 4, 88) 30976 activation_81[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_bn_5 (BatchNormalization (None, 2, 4, 88) 352 concatenate_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_bn_1_5 (BatchNormalizati (None, 2, 4, 88) 352 normal_conv_1_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_82 (Activation) (None, 2, 4, 88) 0 normal_bn_1_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_84 (Activation) (None, 2, 4, 88) 0 adjust_bn_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_86 (Activation) (None, 2, 4, 88) 0 adjust_bn_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_88 (Activation) (None, 2, 4, 88) 0 adjust_bn_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_90 (Activation) (None, 2, 4, 88) 0 normal_bn_1_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left1_5 (None, 2, 4, 88) 9944 activation_82[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right1_ (None, 2, 4, 88) 8536 activation_84[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left2_5 (None, 2, 4, 88) 9944 activation_86[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right2_ (None, 2, 4, 88) 8536 activation_88[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left5_5 (None, 2, 4, 88) 8536 activation_90[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 2, 4, 88) 352 separable_conv_1_normal_left1_5[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 2, 4, 88) 352 separable_conv_1_normal_right1_5[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 2, 4, 88) 352 separable_conv_1_normal_left2_5[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 2, 4, 88) 352 separable_conv_1_normal_right2_5[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 2, 4, 88) 352 separable_conv_1_normal_left5_5[0\n",
"__________________________________________________________________________________________________\n",
"activation_83 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_left1_\n",
"__________________________________________________________________________________________________\n",
"activation_85 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"activation_87 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_left2_\n",
"__________________________________________________________________________________________________\n",
"activation_89 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"activation_91 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_left5_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left1_5 (None, 2, 4, 88) 9944 activation_83[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right1_ (None, 2, 4, 88) 8536 activation_85[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left2_5 (None, 2, 4, 88) 9944 activation_87[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right2_ (None, 2, 4, 88) 8536 activation_89[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left5_5 (None, 2, 4, 88) 8536 activation_91[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 2, 4, 88) 352 separable_conv_2_normal_left1_5[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 2, 4, 88) 352 separable_conv_2_normal_right1_5[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 2, 4, 88) 352 separable_conv_2_normal_left2_5[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 2, 4, 88) 352 separable_conv_2_normal_right2_5[\n",
"__________________________________________________________________________________________________\n",
"normal_left3_5 (AveragePooling2 (None, 2, 4, 88) 0 normal_bn_1_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_left4_5 (AveragePooling2 (None, 2, 4, 88) 0 adjust_bn_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_right4_5 (AveragePooling (None, 2, 4, 88) 0 adjust_bn_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 2, 4, 88) 352 separable_conv_2_normal_left5_5[0\n",
"__________________________________________________________________________________________________\n",
"normal_add_1_5 (Add) (None, 2, 4, 88) 0 separable_conv_2_bn_normal_left1_\n",
" separable_conv_2_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"normal_add_2_5 (Add) (None, 2, 4, 88) 0 separable_conv_2_bn_normal_left2_\n",
" separable_conv_2_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"normal_add_3_5 (Add) (None, 2, 4, 88) 0 normal_left3_5[0][0] \n",
" adjust_bn_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_4_5 (Add) (None, 2, 4, 88) 0 normal_left4_5[0][0] \n",
" normal_right4_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_5_5 (Add) (None, 2, 4, 88) 0 separable_conv_2_bn_normal_left5_\n",
" normal_bn_1_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_concat_5 (Concatenate) (None, 2, 4, 528) 0 adjust_bn_5[0][0] \n",
" normal_add_1_5[0][0] \n",
" normal_add_2_5[0][0] \n",
" normal_add_3_5[0][0] \n",
" normal_add_4_5[0][0] \n",
" normal_add_5_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_92 (Activation) (None, 2, 4, 352) 0 reduction_concat_reduce_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_93 (Activation) (None, 2, 4, 528) 0 normal_concat_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_projection_6 (Conv2 (None, 2, 4, 88) 30976 activation_92[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_conv_1_6 (Conv2D) (None, 2, 4, 88) 46464 activation_93[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_bn_6 (BatchNormalization (None, 2, 4, 88) 352 adjust_conv_projection_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_bn_1_6 (BatchNormalizati (None, 2, 4, 88) 352 normal_conv_1_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_94 (Activation) (None, 2, 4, 88) 0 normal_bn_1_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_96 (Activation) (None, 2, 4, 88) 0 adjust_bn_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_98 (Activation) (None, 2, 4, 88) 0 adjust_bn_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_100 (Activation) (None, 2, 4, 88) 0 adjust_bn_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_102 (Activation) (None, 2, 4, 88) 0 normal_bn_1_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left1_6 (None, 2, 4, 88) 9944 activation_94[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right1_ (None, 2, 4, 88) 8536 activation_96[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left2_6 (None, 2, 4, 88) 9944 activation_98[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right2_ (None, 2, 4, 88) 8536 activation_100[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left5_6 (None, 2, 4, 88) 8536 activation_102[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 2, 4, 88) 352 separable_conv_1_normal_left1_6[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 2, 4, 88) 352 separable_conv_1_normal_right1_6[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 2, 4, 88) 352 separable_conv_1_normal_left2_6[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 2, 4, 88) 352 separable_conv_1_normal_right2_6[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 2, 4, 88) 352 separable_conv_1_normal_left5_6[0\n",
"__________________________________________________________________________________________________\n",
"activation_95 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_left1_\n",
"__________________________________________________________________________________________________\n",
"activation_97 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"activation_99 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_left2_\n",
"__________________________________________________________________________________________________\n",
"activation_101 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"activation_103 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_left5_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left1_6 (None, 2, 4, 88) 9944 activation_95[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right1_ (None, 2, 4, 88) 8536 activation_97[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left2_6 (None, 2, 4, 88) 9944 activation_99[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right2_ (None, 2, 4, 88) 8536 activation_101[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left5_6 (None, 2, 4, 88) 8536 activation_103[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 2, 4, 88) 352 separable_conv_2_normal_left1_6[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 2, 4, 88) 352 separable_conv_2_normal_right1_6[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 2, 4, 88) 352 separable_conv_2_normal_left2_6[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 2, 4, 88) 352 separable_conv_2_normal_right2_6[\n",
"__________________________________________________________________________________________________\n",
"normal_left3_6 (AveragePooling2 (None, 2, 4, 88) 0 normal_bn_1_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_left4_6 (AveragePooling2 (None, 2, 4, 88) 0 adjust_bn_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_right4_6 (AveragePooling (None, 2, 4, 88) 0 adjust_bn_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 2, 4, 88) 352 separable_conv_2_normal_left5_6[0\n",
"__________________________________________________________________________________________________\n",
"normal_add_1_6 (Add) (None, 2, 4, 88) 0 separable_conv_2_bn_normal_left1_\n",
" separable_conv_2_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"normal_add_2_6 (Add) (None, 2, 4, 88) 0 separable_conv_2_bn_normal_left2_\n",
" separable_conv_2_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"normal_add_3_6 (Add) (None, 2, 4, 88) 0 normal_left3_6[0][0] \n",
" adjust_bn_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_4_6 (Add) (None, 2, 4, 88) 0 normal_left4_6[0][0] \n",
" normal_right4_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_5_6 (Add) (None, 2, 4, 88) 0 separable_conv_2_bn_normal_left5_\n",
" normal_bn_1_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_concat_6 (Concatenate) (None, 2, 4, 528) 0 adjust_bn_6[0][0] \n",
" normal_add_1_6[0][0] \n",
" normal_add_2_6[0][0] \n",
" normal_add_3_6[0][0] \n",
" normal_add_4_6[0][0] \n",
" normal_add_5_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_104 (Activation) (None, 2, 4, 528) 0 normal_concat_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_105 (Activation) (None, 2, 4, 528) 0 normal_concat_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_projection_7 (Conv2 (None, 2, 4, 88) 46464 activation_104[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_conv_1_7 (Conv2D) (None, 2, 4, 88) 46464 activation_105[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_bn_7 (BatchNormalization (None, 2, 4, 88) 352 adjust_conv_projection_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_bn_1_7 (BatchNormalizati (None, 2, 4, 88) 352 normal_conv_1_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_106 (Activation) (None, 2, 4, 88) 0 normal_bn_1_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_108 (Activation) (None, 2, 4, 88) 0 adjust_bn_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_110 (Activation) (None, 2, 4, 88) 0 adjust_bn_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_112 (Activation) (None, 2, 4, 88) 0 adjust_bn_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_114 (Activation) (None, 2, 4, 88) 0 normal_bn_1_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left1_7 (None, 2, 4, 88) 9944 activation_106[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right1_ (None, 2, 4, 88) 8536 activation_108[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left2_7 (None, 2, 4, 88) 9944 activation_110[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right2_ (None, 2, 4, 88) 8536 activation_112[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left5_7 (None, 2, 4, 88) 8536 activation_114[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 2, 4, 88) 352 separable_conv_1_normal_left1_7[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 2, 4, 88) 352 separable_conv_1_normal_right1_7[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 2, 4, 88) 352 separable_conv_1_normal_left2_7[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 2, 4, 88) 352 separable_conv_1_normal_right2_7[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 2, 4, 88) 352 separable_conv_1_normal_left5_7[0\n",
"__________________________________________________________________________________________________\n",
"activation_107 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_left1_\n",
"__________________________________________________________________________________________________\n",
"activation_109 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"activation_111 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_left2_\n",
"__________________________________________________________________________________________________\n",
"activation_113 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"activation_115 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_left5_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left1_7 (None, 2, 4, 88) 9944 activation_107[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right1_ (None, 2, 4, 88) 8536 activation_109[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left2_7 (None, 2, 4, 88) 9944 activation_111[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right2_ (None, 2, 4, 88) 8536 activation_113[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left5_7 (None, 2, 4, 88) 8536 activation_115[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 2, 4, 88) 352 separable_conv_2_normal_left1_7[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 2, 4, 88) 352 separable_conv_2_normal_right1_7[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 2, 4, 88) 352 separable_conv_2_normal_left2_7[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 2, 4, 88) 352 separable_conv_2_normal_right2_7[\n",
"__________________________________________________________________________________________________\n",
"normal_left3_7 (AveragePooling2 (None, 2, 4, 88) 0 normal_bn_1_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_left4_7 (AveragePooling2 (None, 2, 4, 88) 0 adjust_bn_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_right4_7 (AveragePooling (None, 2, 4, 88) 0 adjust_bn_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 2, 4, 88) 352 separable_conv_2_normal_left5_7[0\n",
"__________________________________________________________________________________________________\n",
"normal_add_1_7 (Add) (None, 2, 4, 88) 0 separable_conv_2_bn_normal_left1_\n",
" separable_conv_2_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"normal_add_2_7 (Add) (None, 2, 4, 88) 0 separable_conv_2_bn_normal_left2_\n",
" separable_conv_2_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"normal_add_3_7 (Add) (None, 2, 4, 88) 0 normal_left3_7[0][0] \n",
" adjust_bn_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_4_7 (Add) (None, 2, 4, 88) 0 normal_left4_7[0][0] \n",
" normal_right4_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_5_7 (Add) (None, 2, 4, 88) 0 separable_conv_2_bn_normal_left5_\n",
" normal_bn_1_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_concat_7 (Concatenate) (None, 2, 4, 528) 0 adjust_bn_7[0][0] \n",
" normal_add_1_7[0][0] \n",
" normal_add_2_7[0][0] \n",
" normal_add_3_7[0][0] \n",
" normal_add_4_7[0][0] \n",
" normal_add_5_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_116 (Activation) (None, 2, 4, 528) 0 normal_concat_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_117 (Activation) (None, 2, 4, 528) 0 normal_concat_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_projection_8 (Conv2 (None, 2, 4, 88) 46464 activation_116[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_conv_1_8 (Conv2D) (None, 2, 4, 88) 46464 activation_117[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_bn_8 (BatchNormalization (None, 2, 4, 88) 352 adjust_conv_projection_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_bn_1_8 (BatchNormalizati (None, 2, 4, 88) 352 normal_conv_1_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_118 (Activation) (None, 2, 4, 88) 0 normal_bn_1_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_120 (Activation) (None, 2, 4, 88) 0 adjust_bn_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_122 (Activation) (None, 2, 4, 88) 0 adjust_bn_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_124 (Activation) (None, 2, 4, 88) 0 adjust_bn_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_126 (Activation) (None, 2, 4, 88) 0 normal_bn_1_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left1_8 (None, 2, 4, 88) 9944 activation_118[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right1_ (None, 2, 4, 88) 8536 activation_120[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left2_8 (None, 2, 4, 88) 9944 activation_122[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right2_ (None, 2, 4, 88) 8536 activation_124[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left5_8 (None, 2, 4, 88) 8536 activation_126[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 2, 4, 88) 352 separable_conv_1_normal_left1_8[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 2, 4, 88) 352 separable_conv_1_normal_right1_8[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 2, 4, 88) 352 separable_conv_1_normal_left2_8[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 2, 4, 88) 352 separable_conv_1_normal_right2_8[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 2, 4, 88) 352 separable_conv_1_normal_left5_8[0\n",
"__________________________________________________________________________________________________\n",
"activation_119 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_left1_\n",
"__________________________________________________________________________________________________\n",
"activation_121 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"activation_123 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_left2_\n",
"__________________________________________________________________________________________________\n",
"activation_125 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"activation_127 (Activation) (None, 2, 4, 88) 0 separable_conv_1_bn_normal_left5_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left1_8 (None, 2, 4, 88) 9944 activation_119[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right1_ (None, 2, 4, 88) 8536 activation_121[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left2_8 (None, 2, 4, 88) 9944 activation_123[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right2_ (None, 2, 4, 88) 8536 activation_125[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left5_8 (None, 2, 4, 88) 8536 activation_127[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 2, 4, 88) 352 separable_conv_2_normal_left1_8[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 2, 4, 88) 352 separable_conv_2_normal_right1_8[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 2, 4, 88) 352 separable_conv_2_normal_left2_8[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 2, 4, 88) 352 separable_conv_2_normal_right2_8[\n",
"__________________________________________________________________________________________________\n",
"normal_left3_8 (AveragePooling2 (None, 2, 4, 88) 0 normal_bn_1_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_left4_8 (AveragePooling2 (None, 2, 4, 88) 0 adjust_bn_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_right4_8 (AveragePooling (None, 2, 4, 88) 0 adjust_bn_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 2, 4, 88) 352 separable_conv_2_normal_left5_8[0\n",
"__________________________________________________________________________________________________\n",
"normal_add_1_8 (Add) (None, 2, 4, 88) 0 separable_conv_2_bn_normal_left1_\n",
" separable_conv_2_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"normal_add_2_8 (Add) (None, 2, 4, 88) 0 separable_conv_2_bn_normal_left2_\n",
" separable_conv_2_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"normal_add_3_8 (Add) (None, 2, 4, 88) 0 normal_left3_8[0][0] \n",
" adjust_bn_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_4_8 (Add) (None, 2, 4, 88) 0 normal_left4_8[0][0] \n",
" normal_right4_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_5_8 (Add) (None, 2, 4, 88) 0 separable_conv_2_bn_normal_left5_\n",
" normal_bn_1_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_concat_8 (Concatenate) (None, 2, 4, 528) 0 adjust_bn_8[0][0] \n",
" normal_add_1_8[0][0] \n",
" normal_add_2_8[0][0] \n",
" normal_add_3_8[0][0] \n",
" normal_add_4_8[0][0] \n",
" normal_add_5_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_129 (Activation) (None, 2, 4, 528) 0 normal_concat_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_128 (Activation) (None, 2, 4, 528) 0 normal_concat_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_conv_1_reduce_8 (Conv (None, 2, 4, 176) 92928 activation_129[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_projection_reduce_8 (None, 2, 4, 176) 92928 activation_128[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_bn_1_reduce_8 (BatchN (None, 2, 4, 176) 704 reduction_conv_1_reduce_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_bn_reduce_8 (BatchNormal (None, 2, 4, 176) 704 adjust_conv_projection_reduce_8[0\n",
"__________________________________________________________________________________________________\n",
"activation_130 (Activation) (None, 2, 4, 176) 0 reduction_bn_1_reduce_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_132 (Activation) (None, 2, 4, 176) 0 adjust_bn_reduce_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_pad_reduction_ (None, 5, 7, 176) 0 activation_130[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_pad_reduction_ (None, 7, 9, 176) 0 activation_132[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_left (None, 1, 2, 176) 35376 separable_conv_1_pad_reduction_le\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_righ (None, 1, 2, 176) 39600 separable_conv_1_pad_reduction_ri\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_l (None, 1, 2, 176) 704 separable_conv_1_reduction_left1_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_r (None, 1, 2, 176) 704 separable_conv_1_reduction_right1\n",
"__________________________________________________________________________________________________\n",
"activation_131 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_reduction_lef\n",
"__________________________________________________________________________________________________\n",
"activation_133 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_left (None, 1, 2, 176) 35376 activation_131[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_righ (None, 1, 2, 176) 39600 activation_133[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_134 (Activation) (None, 2, 4, 176) 0 adjust_bn_reduce_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_l (None, 1, 2, 176) 704 separable_conv_2_reduction_left1_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_r (None, 1, 2, 176) 704 separable_conv_2_reduction_right1\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_pad_reduction_ (None, 7, 9, 176) 0 activation_134[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_136 (Activation) (None, 2, 4, 176) 0 adjust_bn_reduce_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_add_1_reduce_8 (Add) (None, 1, 2, 176) 0 separable_conv_2_bn_reduction_lef\n",
" separable_conv_2_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_righ (None, 1, 2, 176) 39600 separable_conv_1_pad_reduction_ri\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_pad_reduction_ (None, 5, 7, 176) 0 activation_136[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_138 (Activation) (None, 1, 2, 176) 0 reduction_add_1_reduce_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_r (None, 1, 2, 176) 704 separable_conv_1_reduction_right2\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_righ (None, 1, 2, 176) 35376 separable_conv_1_pad_reduction_ri\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_reduction_left (None, 1, 2, 176) 32560 activation_138[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_135 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_r (None, 1, 2, 176) 704 separable_conv_1_reduction_right3\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_reduction_l (None, 1, 2, 176) 704 separable_conv_1_reduction_left4_\n",
"__________________________________________________________________________________________________\n",
"reduction_pad_1_reduce_8 (ZeroP (None, 3, 5, 176) 0 reduction_bn_1_reduce_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_righ (None, 1, 2, 176) 39600 activation_135[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_137 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"activation_139 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_reduction_lef\n",
"__________________________________________________________________________________________________\n",
"reduction_left2_reduce_8 (MaxPo (None, 1, 2, 176) 0 reduction_pad_1_reduce_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_r (None, 1, 2, 176) 704 separable_conv_2_reduction_right2\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_righ (None, 1, 2, 176) 35376 activation_137[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_reduction_left (None, 1, 2, 176) 32560 activation_139[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_relu_1_9 (Activation) (None, 2, 4, 528) 0 normal_concat_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_add_2_reduce_8 (Add) (None, 1, 2, 176) 0 reduction_left2_reduce_8[0][0] \n",
" separable_conv_2_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"reduction_left3_reduce_8 (Avera (None, 1, 2, 176) 0 reduction_pad_1_reduce_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_r (None, 1, 2, 176) 704 separable_conv_2_reduction_right3\n",
"__________________________________________________________________________________________________\n",
"reduction_left4_reduce_8 (Avera (None, 1, 2, 176) 0 reduction_add_1_reduce_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_reduction_l (None, 1, 2, 176) 704 separable_conv_2_reduction_left4_\n",
"__________________________________________________________________________________________________\n",
"reduction_right5_reduce_8 (MaxP (None, 1, 2, 176) 0 reduction_pad_1_reduce_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"zero_padding2d_3 (ZeroPadding2D (None, 3, 5, 528) 0 adjust_relu_1_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_add3_reduce_8 (Add) (None, 1, 2, 176) 0 reduction_left3_reduce_8[0][0] \n",
" separable_conv_2_bn_reduction_rig\n",
"__________________________________________________________________________________________________\n",
"add_3 (Add) (None, 1, 2, 176) 0 reduction_add_2_reduce_8[0][0] \n",
" reduction_left4_reduce_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_add4_reduce_8 (Add) (None, 1, 2, 176) 0 separable_conv_2_bn_reduction_lef\n",
" reduction_right5_reduce_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"cropping2d_3 (Cropping2D) (None, 2, 4, 528) 0 zero_padding2d_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"reduction_concat_reduce_8 (Conc (None, 1, 2, 704) 0 reduction_add_2_reduce_8[0][0] \n",
" reduction_add3_reduce_8[0][0] \n",
" add_3[0][0] \n",
" reduction_add4_reduce_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_avg_pool_1_9 (AveragePoo (None, 1, 2, 528) 0 adjust_relu_1_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_avg_pool_2_9 (AveragePoo (None, 1, 2, 528) 0 cropping2d_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_1_9 (Conv2D) (None, 1, 2, 88) 46464 adjust_avg_pool_1_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_2_9 (Conv2D) (None, 1, 2, 88) 46464 adjust_avg_pool_2_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_140 (Activation) (None, 1, 2, 704) 0 reduction_concat_reduce_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"concatenate_3 (Concatenate) (None, 1, 2, 176) 0 adjust_conv_1_9[0][0] \n",
" adjust_conv_2_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_conv_1_9 (Conv2D) (None, 1, 2, 176) 123904 activation_140[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_bn_9 (BatchNormalization (None, 1, 2, 176) 704 concatenate_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_bn_1_9 (BatchNormalizati (None, 1, 2, 176) 704 normal_conv_1_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_141 (Activation) (None, 1, 2, 176) 0 normal_bn_1_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_143 (Activation) (None, 1, 2, 176) 0 adjust_bn_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_145 (Activation) (None, 1, 2, 176) 0 adjust_bn_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_147 (Activation) (None, 1, 2, 176) 0 adjust_bn_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_149 (Activation) (None, 1, 2, 176) 0 normal_bn_1_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left1_9 (None, 1, 2, 176) 35376 activation_141[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right1_ (None, 1, 2, 176) 32560 activation_143[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left2_9 (None, 1, 2, 176) 35376 activation_145[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right2_ (None, 1, 2, 176) 32560 activation_147[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left5_9 (None, 1, 2, 176) 32560 activation_149[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 1, 2, 176) 704 separable_conv_1_normal_left1_9[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 1, 2, 176) 704 separable_conv_1_normal_right1_9[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 1, 2, 176) 704 separable_conv_1_normal_left2_9[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 1, 2, 176) 704 separable_conv_1_normal_right2_9[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 1, 2, 176) 704 separable_conv_1_normal_left5_9[0\n",
"__________________________________________________________________________________________________\n",
"activation_142 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_left1_\n",
"__________________________________________________________________________________________________\n",
"activation_144 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"activation_146 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_left2_\n",
"__________________________________________________________________________________________________\n",
"activation_148 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"activation_150 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_left5_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left1_9 (None, 1, 2, 176) 35376 activation_142[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right1_ (None, 1, 2, 176) 32560 activation_144[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left2_9 (None, 1, 2, 176) 35376 activation_146[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right2_ (None, 1, 2, 176) 32560 activation_148[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left5_9 (None, 1, 2, 176) 32560 activation_150[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 1, 2, 176) 704 separable_conv_2_normal_left1_9[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 1, 2, 176) 704 separable_conv_2_normal_right1_9[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 1, 2, 176) 704 separable_conv_2_normal_left2_9[0\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 1, 2, 176) 704 separable_conv_2_normal_right2_9[\n",
"__________________________________________________________________________________________________\n",
"normal_left3_9 (AveragePooling2 (None, 1, 2, 176) 0 normal_bn_1_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_left4_9 (AveragePooling2 (None, 1, 2, 176) 0 adjust_bn_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_right4_9 (AveragePooling (None, 1, 2, 176) 0 adjust_bn_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 1, 2, 176) 704 separable_conv_2_normal_left5_9[0\n",
"__________________________________________________________________________________________________\n",
"normal_add_1_9 (Add) (None, 1, 2, 176) 0 separable_conv_2_bn_normal_left1_\n",
" separable_conv_2_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"normal_add_2_9 (Add) (None, 1, 2, 176) 0 separable_conv_2_bn_normal_left2_\n",
" separable_conv_2_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"normal_add_3_9 (Add) (None, 1, 2, 176) 0 normal_left3_9[0][0] \n",
" adjust_bn_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_4_9 (Add) (None, 1, 2, 176) 0 normal_left4_9[0][0] \n",
" normal_right4_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_5_9 (Add) (None, 1, 2, 176) 0 separable_conv_2_bn_normal_left5_\n",
" normal_bn_1_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_concat_9 (Concatenate) (None, 1, 2, 1056) 0 adjust_bn_9[0][0] \n",
" normal_add_1_9[0][0] \n",
" normal_add_2_9[0][0] \n",
" normal_add_3_9[0][0] \n",
" normal_add_4_9[0][0] \n",
" normal_add_5_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_151 (Activation) (None, 1, 2, 704) 0 reduction_concat_reduce_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_152 (Activation) (None, 1, 2, 1056) 0 normal_concat_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_projection_10 (Conv (None, 1, 2, 176) 123904 activation_151[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_conv_1_10 (Conv2D) (None, 1, 2, 176) 185856 activation_152[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_bn_10 (BatchNormalizatio (None, 1, 2, 176) 704 adjust_conv_projection_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_bn_1_10 (BatchNormalizat (None, 1, 2, 176) 704 normal_conv_1_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_153 (Activation) (None, 1, 2, 176) 0 normal_bn_1_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_155 (Activation) (None, 1, 2, 176) 0 adjust_bn_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_157 (Activation) (None, 1, 2, 176) 0 adjust_bn_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_159 (Activation) (None, 1, 2, 176) 0 adjust_bn_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_161 (Activation) (None, 1, 2, 176) 0 normal_bn_1_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left1_1 (None, 1, 2, 176) 35376 activation_153[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right1_ (None, 1, 2, 176) 32560 activation_155[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left2_1 (None, 1, 2, 176) 35376 activation_157[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right2_ (None, 1, 2, 176) 32560 activation_159[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left5_1 (None, 1, 2, 176) 32560 activation_161[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 1, 2, 176) 704 separable_conv_1_normal_left1_10[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 1, 2, 176) 704 separable_conv_1_normal_right1_10\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 1, 2, 176) 704 separable_conv_1_normal_left2_10[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 1, 2, 176) 704 separable_conv_1_normal_right2_10\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 1, 2, 176) 704 separable_conv_1_normal_left5_10[\n",
"__________________________________________________________________________________________________\n",
"activation_154 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_left1_\n",
"__________________________________________________________________________________________________\n",
"activation_156 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"activation_158 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_left2_\n",
"__________________________________________________________________________________________________\n",
"activation_160 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"activation_162 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_left5_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left1_1 (None, 1, 2, 176) 35376 activation_154[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right1_ (None, 1, 2, 176) 32560 activation_156[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left2_1 (None, 1, 2, 176) 35376 activation_158[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right2_ (None, 1, 2, 176) 32560 activation_160[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left5_1 (None, 1, 2, 176) 32560 activation_162[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 1, 2, 176) 704 separable_conv_2_normal_left1_10[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 1, 2, 176) 704 separable_conv_2_normal_right1_10\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 1, 2, 176) 704 separable_conv_2_normal_left2_10[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 1, 2, 176) 704 separable_conv_2_normal_right2_10\n",
"__________________________________________________________________________________________________\n",
"normal_left3_10 (AveragePooling (None, 1, 2, 176) 0 normal_bn_1_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_left4_10 (AveragePooling (None, 1, 2, 176) 0 adjust_bn_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_right4_10 (AveragePoolin (None, 1, 2, 176) 0 adjust_bn_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 1, 2, 176) 704 separable_conv_2_normal_left5_10[\n",
"__________________________________________________________________________________________________\n",
"normal_add_1_10 (Add) (None, 1, 2, 176) 0 separable_conv_2_bn_normal_left1_\n",
" separable_conv_2_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"normal_add_2_10 (Add) (None, 1, 2, 176) 0 separable_conv_2_bn_normal_left2_\n",
" separable_conv_2_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"normal_add_3_10 (Add) (None, 1, 2, 176) 0 normal_left3_10[0][0] \n",
" adjust_bn_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_4_10 (Add) (None, 1, 2, 176) 0 normal_left4_10[0][0] \n",
" normal_right4_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_5_10 (Add) (None, 1, 2, 176) 0 separable_conv_2_bn_normal_left5_\n",
" normal_bn_1_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_concat_10 (Concatenate) (None, 1, 2, 1056) 0 adjust_bn_10[0][0] \n",
" normal_add_1_10[0][0] \n",
" normal_add_2_10[0][0] \n",
" normal_add_3_10[0][0] \n",
" normal_add_4_10[0][0] \n",
" normal_add_5_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_163 (Activation) (None, 1, 2, 1056) 0 normal_concat_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_164 (Activation) (None, 1, 2, 1056) 0 normal_concat_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_projection_11 (Conv (None, 1, 2, 176) 185856 activation_163[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_conv_1_11 (Conv2D) (None, 1, 2, 176) 185856 activation_164[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_bn_11 (BatchNormalizatio (None, 1, 2, 176) 704 adjust_conv_projection_11[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_bn_1_11 (BatchNormalizat (None, 1, 2, 176) 704 normal_conv_1_11[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_165 (Activation) (None, 1, 2, 176) 0 normal_bn_1_11[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_167 (Activation) (None, 1, 2, 176) 0 adjust_bn_11[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_169 (Activation) (None, 1, 2, 176) 0 adjust_bn_11[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_171 (Activation) (None, 1, 2, 176) 0 adjust_bn_11[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_173 (Activation) (None, 1, 2, 176) 0 normal_bn_1_11[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left1_1 (None, 1, 2, 176) 35376 activation_165[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right1_ (None, 1, 2, 176) 32560 activation_167[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left2_1 (None, 1, 2, 176) 35376 activation_169[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right2_ (None, 1, 2, 176) 32560 activation_171[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left5_1 (None, 1, 2, 176) 32560 activation_173[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 1, 2, 176) 704 separable_conv_1_normal_left1_11[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 1, 2, 176) 704 separable_conv_1_normal_right1_11\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 1, 2, 176) 704 separable_conv_1_normal_left2_11[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 1, 2, 176) 704 separable_conv_1_normal_right2_11\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 1, 2, 176) 704 separable_conv_1_normal_left5_11[\n",
"__________________________________________________________________________________________________\n",
"activation_166 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_left1_\n",
"__________________________________________________________________________________________________\n",
"activation_168 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"activation_170 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_left2_\n",
"__________________________________________________________________________________________________\n",
"activation_172 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"activation_174 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_left5_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left1_1 (None, 1, 2, 176) 35376 activation_166[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right1_ (None, 1, 2, 176) 32560 activation_168[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left2_1 (None, 1, 2, 176) 35376 activation_170[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right2_ (None, 1, 2, 176) 32560 activation_172[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left5_1 (None, 1, 2, 176) 32560 activation_174[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 1, 2, 176) 704 separable_conv_2_normal_left1_11[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 1, 2, 176) 704 separable_conv_2_normal_right1_11\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 1, 2, 176) 704 separable_conv_2_normal_left2_11[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 1, 2, 176) 704 separable_conv_2_normal_right2_11\n",
"__________________________________________________________________________________________________\n",
"normal_left3_11 (AveragePooling (None, 1, 2, 176) 0 normal_bn_1_11[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_left4_11 (AveragePooling (None, 1, 2, 176) 0 adjust_bn_11[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_right4_11 (AveragePoolin (None, 1, 2, 176) 0 adjust_bn_11[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 1, 2, 176) 704 separable_conv_2_normal_left5_11[\n",
"__________________________________________________________________________________________________\n",
"normal_add_1_11 (Add) (None, 1, 2, 176) 0 separable_conv_2_bn_normal_left1_\n",
" separable_conv_2_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"normal_add_2_11 (Add) (None, 1, 2, 176) 0 separable_conv_2_bn_normal_left2_\n",
" separable_conv_2_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"normal_add_3_11 (Add) (None, 1, 2, 176) 0 normal_left3_11[0][0] \n",
" adjust_bn_11[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_4_11 (Add) (None, 1, 2, 176) 0 normal_left4_11[0][0] \n",
" normal_right4_11[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_5_11 (Add) (None, 1, 2, 176) 0 separable_conv_2_bn_normal_left5_\n",
" normal_bn_1_11[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_concat_11 (Concatenate) (None, 1, 2, 1056) 0 adjust_bn_11[0][0] \n",
" normal_add_1_11[0][0] \n",
" normal_add_2_11[0][0] \n",
" normal_add_3_11[0][0] \n",
" normal_add_4_11[0][0] \n",
" normal_add_5_11[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_175 (Activation) (None, 1, 2, 1056) 0 normal_concat_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_176 (Activation) (None, 1, 2, 1056) 0 normal_concat_11[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_conv_projection_12 (Conv (None, 1, 2, 176) 185856 activation_175[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_conv_1_12 (Conv2D) (None, 1, 2, 176) 185856 activation_176[0][0] \n",
"__________________________________________________________________________________________________\n",
"adjust_bn_12 (BatchNormalizatio (None, 1, 2, 176) 704 adjust_conv_projection_12[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_bn_1_12 (BatchNormalizat (None, 1, 2, 176) 704 normal_conv_1_12[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_177 (Activation) (None, 1, 2, 176) 0 normal_bn_1_12[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_179 (Activation) (None, 1, 2, 176) 0 adjust_bn_12[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_181 (Activation) (None, 1, 2, 176) 0 adjust_bn_12[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_183 (Activation) (None, 1, 2, 176) 0 adjust_bn_12[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_185 (Activation) (None, 1, 2, 176) 0 normal_bn_1_12[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left1_1 (None, 1, 2, 176) 35376 activation_177[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right1_ (None, 1, 2, 176) 32560 activation_179[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left2_1 (None, 1, 2, 176) 35376 activation_181[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_right2_ (None, 1, 2, 176) 32560 activation_183[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_normal_left5_1 (None, 1, 2, 176) 32560 activation_185[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 1, 2, 176) 704 separable_conv_1_normal_left1_12[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 1, 2, 176) 704 separable_conv_1_normal_right1_12\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 1, 2, 176) 704 separable_conv_1_normal_left2_12[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_righ (None, 1, 2, 176) 704 separable_conv_1_normal_right2_12\n",
"__________________________________________________________________________________________________\n",
"separable_conv_1_bn_normal_left (None, 1, 2, 176) 704 separable_conv_1_normal_left5_12[\n",
"__________________________________________________________________________________________________\n",
"activation_178 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_left1_\n",
"__________________________________________________________________________________________________\n",
"activation_180 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"activation_182 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_left2_\n",
"__________________________________________________________________________________________________\n",
"activation_184 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"activation_186 (Activation) (None, 1, 2, 176) 0 separable_conv_1_bn_normal_left5_\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left1_1 (None, 1, 2, 176) 35376 activation_178[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right1_ (None, 1, 2, 176) 32560 activation_180[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left2_1 (None, 1, 2, 176) 35376 activation_182[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_right2_ (None, 1, 2, 176) 32560 activation_184[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_normal_left5_1 (None, 1, 2, 176) 32560 activation_186[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 1, 2, 176) 704 separable_conv_2_normal_left1_12[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 1, 2, 176) 704 separable_conv_2_normal_right1_12\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 1, 2, 176) 704 separable_conv_2_normal_left2_12[\n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_righ (None, 1, 2, 176) 704 separable_conv_2_normal_right2_12\n",
"__________________________________________________________________________________________________\n",
"normal_left3_12 (AveragePooling (None, 1, 2, 176) 0 normal_bn_1_12[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_left4_12 (AveragePooling (None, 1, 2, 176) 0 adjust_bn_12[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_right4_12 (AveragePoolin (None, 1, 2, 176) 0 adjust_bn_12[0][0] \n",
"__________________________________________________________________________________________________\n",
"separable_conv_2_bn_normal_left (None, 1, 2, 176) 704 separable_conv_2_normal_left5_12[\n",
"__________________________________________________________________________________________________\n",
"normal_add_1_12 (Add) (None, 1, 2, 176) 0 separable_conv_2_bn_normal_left1_\n",
" separable_conv_2_bn_normal_right1\n",
"__________________________________________________________________________________________________\n",
"normal_add_2_12 (Add) (None, 1, 2, 176) 0 separable_conv_2_bn_normal_left2_\n",
" separable_conv_2_bn_normal_right2\n",
"__________________________________________________________________________________________________\n",
"normal_add_3_12 (Add) (None, 1, 2, 176) 0 normal_left3_12[0][0] \n",
" adjust_bn_12[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_4_12 (Add) (None, 1, 2, 176) 0 normal_left4_12[0][0] \n",
" normal_right4_12[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_add_5_12 (Add) (None, 1, 2, 176) 0 separable_conv_2_bn_normal_left5_\n",
" normal_bn_1_12[0][0] \n",
"__________________________________________________________________________________________________\n",
"normal_concat_12 (Concatenate) (None, 1, 2, 1056) 0 adjust_bn_12[0][0] \n",
" normal_add_1_12[0][0] \n",
" normal_add_2_12[0][0] \n",
" normal_add_3_12[0][0] \n",
" normal_add_4_12[0][0] \n",
" normal_add_5_12[0][0] \n",
"__________________________________________________________________________________________________\n",
"activation_187 (Activation) (None, 1, 2, 1056) 0 normal_concat_12[0][0] \n",
"__________________________________________________________________________________________________\n",
"global_average_pooling2d (Globa (None, 1056) 0 activation_187[0][0] \n",
"__________________________________________________________________________________________________\n",
"dense (Dense) (None, 2) 2114 global_average_pooling2d[0][0] \n",
"==================================================================================================\n",
"Total params: 4,271,254\n",
"Trainable params: 4,234,516\n",
"Non-trainable params: 36,738\n",
"__________________________________________________________________________________________________\n",
"None\n"
]
},
{
"data": {
"text/plain": [
"'\\nmodel = tf.keras.Sequential(\\n [\\n tf.keras.Input(shape=(img_width, img_height, 3)),\\n tf.keras.layers.Conv2D(32, kernel_size=(3, 3), activation=\"relu\"),\\n tf.keras.layers.MaxPooling2D(pool_size=(2, 2)),\\n tf.keras.layers.Conv2D(32, kernel_size=(3, 3), activation=\"relu\"),\\n tf.keras.layers.MaxPooling2D(pool_size=(2, 2)),\\n tf.keras.layers.Conv2D(32, kernel_size=(3, 3), activation=\"relu\"),\\n tf.keras.layers.MaxPooling2D(pool_size=(2, 2)),\\n tf.keras.layers.Flatten(),\\n tf.keras.layers.Dropout(0.5),\\n tf.keras.layers.Dense(nb_classes, activation=\"softmax\"),\\n ]\\n)\\n\\nmodel.summary()\\n'"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Build the classifier: a NASNetMobile backbone on single-channel\n",
"# (img_width, img_height, 1) inputs, topped with global average pooling\n",
"# and a softmax layer over the nb_classes categories.\n",
"#base_model = tf.keras.applications.Xception(input_shape=(img_width, img_height, 3), weights='imagenet', include_top=False)\n",
"#base_model = tf.keras.applications.MobileNetV2(input_shape=(img_width, img_height, 1), weights=None, include_top=False)\n",
"base_model = tf.keras.applications.NASNetMobile(input_shape=(img_width, img_height, 1), weights=None, include_top=False)\n",
"#base_model = tf.keras.applications.Xception(input_shape=(img_width, img_height, 1), weights=None, include_top=False)\n",
"\n",
"# Top model block: pool backbone features, then classify.\n",
"x = base_model.output\n",
"x = GlobalAveragePooling2D()(x)\n",
"predictions = Dense(nb_classes, activation='softmax')(x)\n",
"\n",
"# Attach the top block to the backbone.\n",
"model = tf.keras.Model(base_model.input, predictions)\n",
"\n",
"# # To inspect layer names/indices (useful when deciding which layers\n",
"# # to freeze for transfer learning):\n",
"# for i, layer in enumerate(model.layers):\n",
"#     print(i, layer.name)\n",
"\n",
"# weights=None above means the backbone starts from random initialization,\n",
"# so there is nothing pre-trained to freeze: train the whole network\n",
"# end-to-end.\n",
"for layer in base_model.layers:\n",
"    layer.trainable = True\n",
"\n",
"# Model.summary() prints the table itself and returns None, so it must not\n",
"# be wrapped in print() (that appends a spurious 'None' line to the output).\n",
"model.summary()\n",
"\n",
"# Alternative: a small CNN trained from scratch, kept for reference.\n",
"\"\"\"\n",
"model = tf.keras.Sequential(\n",
"    [\n",
"        tf.keras.Input(shape=(img_width, img_height, 3)),\n",
"        tf.keras.layers.Conv2D(32, kernel_size=(3, 3), activation=\"relu\"),\n",
"        tf.keras.layers.MaxPooling2D(pool_size=(2, 2)),\n",
"        tf.keras.layers.Conv2D(32, kernel_size=(3, 3), activation=\"relu\"),\n",
"        tf.keras.layers.MaxPooling2D(pool_size=(2, 2)),\n",
"        tf.keras.layers.Conv2D(32, kernel_size=(3, 3), activation=\"relu\"),\n",
"        tf.keras.layers.MaxPooling2D(pool_size=(2, 2)),\n",
"        tf.keras.layers.Flatten(),\n",
"        tf.keras.layers.Dropout(0.5),\n",
"        tf.keras.layers.Dense(nb_classes, activation=\"softmax\"),\n",
"    ]\n",
")\n",
"\n",
"model.summary()\n",
"\"\"\""
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Found 1511 images belonging to 2 classes.\n",
"Found 377 images belonging to 2 classes.\n",
"WARNING:tensorflow:sample_weight modes were coerced from\n",
" ...\n",
" to \n",
" ['...']\n",
"WARNING:tensorflow:sample_weight modes were coerced from\n",
" ...\n",
" to \n",
" ['...']\n",
"Train for 302 steps, validate for 75 steps\n",
"Epoch 1/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.8129 - accuracy: 0.4875\n",
"Epoch 00001: val_loss improved from inf to 4.80805, saving model to /home/dlsaavedra/Desktop/Rentadrone.cl-ai-test&SomeCode/model-definition/GPS_Panel/Classifier/model_2/top_model_weights.h5\n",
"302/302 [==============================] - 53s 177ms/step - loss: 0.8136 - accuracy: 0.4884 - val_loss: 4.8080 - val_accuracy: 1.0000\n",
"Epoch 2/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.8003 - accuracy: 0.4464\n",
"Epoch 00002: val_loss improved from 4.80805 to 1.74620, saving model to /home/dlsaavedra/Desktop/Rentadrone.cl-ai-test&SomeCode/model-definition/GPS_Panel/Classifier/model_2/top_model_weights.h5\n",
"302/302 [==============================] - 27s 88ms/step - loss: 0.8011 - accuracy: 0.4466 - val_loss: 1.7462 - val_accuracy: 0.8133\n",
"Epoch 3/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7712 - accuracy: 0.3970\n",
"Epoch 00003: val_loss improved from 1.74620 to 0.86740, saving model to /home/dlsaavedra/Desktop/Rentadrone.cl-ai-test&SomeCode/model-definition/GPS_Panel/Classifier/model_2/top_model_weights.h5\n",
"302/302 [==============================] - 29s 97ms/step - loss: 0.7710 - accuracy: 0.3982 - val_loss: 0.8674 - val_accuracy: 0.8667\n",
"Epoch 4/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7743 - accuracy: 0.5274\n",
"Epoch 00004: val_loss improved from 0.86740 to 0.75537, saving model to /home/dlsaavedra/Desktop/Rentadrone.cl-ai-test&SomeCode/model-definition/GPS_Panel/Classifier/model_2/top_model_weights.h5\n",
"302/302 [==============================] - 28s 91ms/step - loss: 0.7740 - accuracy: 0.5273 - val_loss: 0.7554 - val_accuracy: 0.9267\n",
"Epoch 5/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7839 - accuracy: 0.6271\n",
"Epoch 00005: val_loss improved from 0.75537 to 0.74694, saving model to /home/dlsaavedra/Desktop/Rentadrone.cl-ai-test&SomeCode/model-definition/GPS_Panel/Classifier/model_2/top_model_weights.h5\n",
"302/302 [==============================] - 28s 92ms/step - loss: 0.7825 - accuracy: 0.6283 - val_loss: 0.7469 - val_accuracy: 0.2133\n",
"Epoch 6/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7700 - accuracy: 0.6121\n",
"Epoch 00006: val_loss did not improve from 0.74694\n",
"302/302 [==============================] - 26s 87ms/step - loss: 0.7698 - accuracy: 0.6118 - val_loss: 0.7491 - val_accuracy: 0.0000e+00\n",
"Epoch 7/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7796 - accuracy: 0.5520\n",
"Epoch 00007: val_loss improved from 0.74694 to 0.74501, saving model to /home/dlsaavedra/Desktop/Rentadrone.cl-ai-test&SomeCode/model-definition/GPS_Panel/Classifier/model_2/top_model_weights.h5\n",
"302/302 [==============================] - 28s 91ms/step - loss: 0.7804 - accuracy: 0.5510 - val_loss: 0.7450 - val_accuracy: 0.8833\n",
"Epoch 8/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7803 - accuracy: 0.5295\n",
"Epoch 00008: val_loss did not improve from 0.74501\n",
"302/302 [==============================] - 26s 86ms/step - loss: 0.7800 - accuracy: 0.5286 - val_loss: 0.7602 - val_accuracy: 0.9700\n",
"Epoch 9/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7753 - accuracy: 0.5021\n",
"Epoch 00009: val_loss did not improve from 0.74501\n",
"302/302 [==============================] - 26s 86ms/step - loss: 0.7762 - accuracy: 0.5012 - val_loss: 0.7533 - val_accuracy: 1.0000\n",
"Epoch 10/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7586 - accuracy: 0.5470\n",
"Epoch 00010: val_loss improved from 0.74501 to 0.74411, saving model to /home/dlsaavedra/Desktop/Rentadrone.cl-ai-test&SomeCode/model-definition/GPS_Panel/Classifier/model_2/top_model_weights.h5\n",
"302/302 [==============================] - 27s 91ms/step - loss: 0.7596 - accuracy: 0.5460 - val_loss: 0.7441 - val_accuracy: 0.6000\n",
"Epoch 11/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7865 - accuracy: 0.4975\n",
"Epoch 00011: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 86ms/step - loss: 0.7862 - accuracy: 0.4992 - val_loss: 0.7442 - val_accuracy: 0.7400\n",
"Epoch 12/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7701 - accuracy: 0.5166\n",
"Epoch 00012: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 87ms/step - loss: 0.7710 - accuracy: 0.5157 - val_loss: 0.7484 - val_accuracy: 0.9700\n",
"Epoch 13/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7850 - accuracy: 0.5436\n",
"Epoch 00013: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 87ms/step - loss: 0.7824 - accuracy: 0.5452 - val_loss: 0.7456 - val_accuracy: 0.8833\n",
"Epoch 14/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7870 - accuracy: 0.5137\n",
"Epoch 00014: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 87ms/step - loss: 0.7867 - accuracy: 0.5145 - val_loss: 0.7480 - val_accuracy: 0.9133\n",
"Epoch 15/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7820 - accuracy: 0.4988\n",
"Epoch 00015: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 86ms/step - loss: 0.7840 - accuracy: 0.4996 - val_loss: 0.7802 - val_accuracy: 0.9700\n",
"Epoch 16/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7658 - accuracy: 0.4921\n",
"Epoch 00016: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 87ms/step - loss: 0.7667 - accuracy: 0.4921 - val_loss: 0.7669 - val_accuracy: 0.9600\n",
"Epoch 17/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7635 - accuracy: 0.4938\n",
"Epoch 00017: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 86ms/step - loss: 0.7644 - accuracy: 0.4930 - val_loss: 0.7685 - val_accuracy: 1.0000\n",
"Epoch 18/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7780 - accuracy: 0.5004\n",
"Epoch 00018: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 86ms/step - loss: 0.7777 - accuracy: 0.4988 - val_loss: 0.7746 - val_accuracy: 1.0000\n",
"Epoch 19/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7775 - accuracy: 0.4356\n",
"Epoch 00019: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 87ms/step - loss: 0.7784 - accuracy: 0.4350 - val_loss: 0.7941 - val_accuracy: 1.0000\n",
"Epoch 20/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7569 - accuracy: 0.4705\n",
"Epoch 00020: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 87ms/step - loss: 0.7567 - accuracy: 0.4698 - val_loss: 0.8164 - val_accuracy: 1.0000\n",
"Epoch 21/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7728 - accuracy: 0.4651\n",
"Epoch 00021: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 87ms/step - loss: 0.7703 - accuracy: 0.4669 - val_loss: 0.8269 - val_accuracy: 1.0000\n",
"Epoch 22/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7599 - accuracy: 0.4530\n",
"Epoch 00022: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 87ms/step - loss: 0.7596 - accuracy: 0.4532 - val_loss: 0.8224 - val_accuracy: 1.0000\n",
"Epoch 23/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7759 - accuracy: 0.4228\n",
"Epoch 00023: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 87ms/step - loss: 0.7779 - accuracy: 0.4222 - val_loss: 0.8362 - val_accuracy: 1.0000\n",
"Epoch 24/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7794 - accuracy: 0.4161\n",
"Epoch 00024: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 27s 88ms/step - loss: 0.7792 - accuracy: 0.4164 - val_loss: 0.8070 - val_accuracy: 1.0000\n",
"Epoch 25/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7848 - accuracy: 0.4518\n",
"Epoch 00025: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 87ms/step - loss: 0.7833 - accuracy: 0.4528 - val_loss: 0.8218 - val_accuracy: 1.0000\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 26/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7792 - accuracy: 0.4879\n",
"Epoch 00026: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 29s 95ms/step - loss: 0.7789 - accuracy: 0.4880 - val_loss: 0.8188 - val_accuracy: 1.0000\n",
"Epoch 27/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7606 - accuracy: 0.4485\n",
"Epoch 00027: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 29s 96ms/step - loss: 0.7615 - accuracy: 0.4478 - val_loss: 0.7667 - val_accuracy: 1.0000\n",
"Epoch 28/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7733 - accuracy: 0.4663\n",
"Epoch 00028: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 27s 89ms/step - loss: 0.7731 - accuracy: 0.4673 - val_loss: 0.7690 - val_accuracy: 1.0000\n",
"Epoch 29/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7709 - accuracy: 0.4763\n",
"Epoch 00029: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 86ms/step - loss: 0.7718 - accuracy: 0.4780 - val_loss: 0.7542 - val_accuracy: 1.0000\n",
"Epoch 30/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7863 - accuracy: 0.4389\n",
"Epoch 00030: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7848 - accuracy: 0.4408 - val_loss: 0.7531 - val_accuracy: 1.0000\n",
"Epoch 31/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7751 - accuracy: 0.4347\n",
"Epoch 00031: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7737 - accuracy: 0.4366 - val_loss: 0.7635 - val_accuracy: 1.0000\n",
"Epoch 32/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7767 - accuracy: 0.4447\n",
"Epoch 00032: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7776 - accuracy: 0.4449 - val_loss: 0.7726 - val_accuracy: 1.0000\n",
"Epoch 33/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7748 - accuracy: 0.4248\n",
"Epoch 00033: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7722 - accuracy: 0.4258 - val_loss: 0.7670 - val_accuracy: 1.0000\n",
"Epoch 34/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7697 - accuracy: 0.4468\n",
"Epoch 00034: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7695 - accuracy: 0.4478 - val_loss: 0.7664 - val_accuracy: 1.0000\n",
"Epoch 35/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7763 - accuracy: 0.4214\n",
"Epoch 00035: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 82ms/step - loss: 0.7772 - accuracy: 0.4217 - val_loss: 0.8123 - val_accuracy: 1.0000\n",
"Epoch 36/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7690 - accuracy: 0.4680\n",
"Epoch 00036: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7676 - accuracy: 0.4689 - val_loss: 0.7774 - val_accuracy: 1.0000\n",
"Epoch 37/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7864 - accuracy: 0.4347\n",
"Epoch 00037: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7861 - accuracy: 0.4341 - val_loss: 0.7785 - val_accuracy: 1.0000\n",
"Epoch 38/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7687 - accuracy: 0.4073\n",
"Epoch 00038: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7686 - accuracy: 0.4068 - val_loss: 0.8420 - val_accuracy: 1.0000\n",
"Epoch 39/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7694 - accuracy: 0.4555\n",
"Epoch 00039: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7715 - accuracy: 0.4557 - val_loss: 0.8583 - val_accuracy: 1.0000\n",
"Epoch 40/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7752 - accuracy: 0.4190\n",
"Epoch 00040: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7761 - accuracy: 0.4184 - val_loss: 0.8174 - val_accuracy: 1.0000\n",
"Epoch 41/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7805 - accuracy: 0.4015\n",
"Epoch 00041: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7814 - accuracy: 0.4002 - val_loss: 0.7887 - val_accuracy: 1.0000\n",
"Epoch 42/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7875 - accuracy: 0.4098\n",
"Epoch 00042: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7872 - accuracy: 0.4101 - val_loss: 0.8108 - val_accuracy: 1.0000\n",
"Epoch 43/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7771 - accuracy: 0.4256\n",
"Epoch 00043: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7768 - accuracy: 0.4258 - val_loss: 0.7922 - val_accuracy: 1.0000\n",
"Epoch 44/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7761 - accuracy: 0.4331\n",
"Epoch 00044: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7759 - accuracy: 0.4333 - val_loss: 0.7728 - val_accuracy: 0.9967\n",
"Epoch 45/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7671 - accuracy: 0.4439\n",
"Epoch 00045: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7657 - accuracy: 0.4457 - val_loss: 0.7682 - val_accuracy: 0.9933\n",
"Epoch 46/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7696 - accuracy: 0.4372\n",
"Epoch 00046: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7694 - accuracy: 0.4374 - val_loss: 0.7857 - val_accuracy: 0.9733\n",
"Epoch 47/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7748 - accuracy: 0.4630\n",
"Epoch 00047: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7746 - accuracy: 0.4640 - val_loss: 0.8694 - val_accuracy: 0.9067\n",
"Epoch 48/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7586 - accuracy: 0.4738\n",
"Epoch 00048: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7607 - accuracy: 0.4722 - val_loss: 1.0086 - val_accuracy: 0.8200\n",
"Epoch 49/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7781 - accuracy: 0.4190\n",
"Epoch 00049: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 85ms/step - loss: 0.7790 - accuracy: 0.4176 - val_loss: 0.9529 - val_accuracy: 0.8367\n",
"Epoch 50/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7893 - accuracy: 0.4447\n",
"Epoch 00050: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 86ms/step - loss: 0.7902 - accuracy: 0.4449 - val_loss: 0.8366 - val_accuracy: 0.9033\n",
"Epoch 51/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7722 - accuracy: 0.4414\n",
"Epoch 00051: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7731 - accuracy: 0.4399 - val_loss: 0.8979 - val_accuracy: 0.8667\n",
"Epoch 52/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7709 - accuracy: 0.4319\n",
"Epoch 00052: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7729 - accuracy: 0.4313 - val_loss: 1.1077 - val_accuracy: 0.7033\n",
"Epoch 53/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7911 - accuracy: 0.4510\n",
"Epoch 00053: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7908 - accuracy: 0.4512 - val_loss: 0.9211 - val_accuracy: 0.7567\n",
"Epoch 54/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7599 - accuracy: 0.4206\n",
"Epoch 00054: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7596 - accuracy: 0.4209 - val_loss: 1.0797 - val_accuracy: 0.7333\n",
"Epoch 55/500\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"301/302 [============================>.] - ETA: 0s - loss: 0.7825 - accuracy: 0.4331\n",
"Epoch 00055: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7834 - accuracy: 0.4333 - val_loss: 1.1013 - val_accuracy: 0.6667\n",
"Epoch 56/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7827 - accuracy: 0.4228\n",
"Epoch 00056: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 82ms/step - loss: 0.7825 - accuracy: 0.4230 - val_loss: 0.9862 - val_accuracy: 0.6067\n",
"Epoch 57/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7722 - accuracy: 0.4186\n",
"Epoch 00057: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7720 - accuracy: 0.4180 - val_loss: 1.1431 - val_accuracy: 0.5800\n",
"Epoch 58/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7732 - accuracy: 0.4381\n",
"Epoch 00058: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7741 - accuracy: 0.4383 - val_loss: 1.1785 - val_accuracy: 0.5233\n",
"Epoch 59/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7854 - accuracy: 0.4796\n",
"Epoch 00059: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7851 - accuracy: 0.4797 - val_loss: 1.4063 - val_accuracy: 0.5433\n",
"Epoch 60/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7793 - accuracy: 0.4796\n",
"Epoch 00060: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7767 - accuracy: 0.4805 - val_loss: 1.1028 - val_accuracy: 0.3733\n",
"Epoch 61/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7805 - accuracy: 0.4680\n",
"Epoch 00061: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 87ms/step - loss: 0.7790 - accuracy: 0.4681 - val_loss: 1.4055 - val_accuracy: 0.0367\n",
"Epoch 62/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7951 - accuracy: 0.4705\n",
"Epoch 00062: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7960 - accuracy: 0.4706 - val_loss: 1.4515 - val_accuracy: 0.5567\n",
"Epoch 63/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7804 - accuracy: 0.4422\n",
"Epoch 00063: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7813 - accuracy: 0.4424 - val_loss: 1.2250 - val_accuracy: 0.0233\n",
"Epoch 64/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7778 - accuracy: 0.4539\n",
"Epoch 00064: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7764 - accuracy: 0.4532 - val_loss: 1.0249 - val_accuracy: 0.0333\n",
"Epoch 65/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7875 - accuracy: 0.5046\n",
"Epoch 00065: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7884 - accuracy: 0.5046 - val_loss: 1.5478 - val_accuracy: 0.5433\n",
"Epoch 66/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7923 - accuracy: 0.4971\n",
"Epoch 00066: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7897 - accuracy: 0.4971 - val_loss: 1.3436 - val_accuracy: 0.1000\n",
"Epoch 67/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7826 - accuracy: 0.4904\n",
"Epoch 00067: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7812 - accuracy: 0.4905 - val_loss: 1.0646 - val_accuracy: 0.5267\n",
"Epoch 68/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7666 - accuracy: 0.4805\n",
"Epoch 00068: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7664 - accuracy: 0.4805 - val_loss: 1.0903 - val_accuracy: 0.5267\n",
"Epoch 69/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7919 - accuracy: 0.4722\n",
"Epoch 00069: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7916 - accuracy: 0.4722 - val_loss: 1.0129 - val_accuracy: 0.5000\n",
"Epoch 70/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7680 - accuracy: 0.4722\n",
"Epoch 00070: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7678 - accuracy: 0.4722 - val_loss: 1.0083 - val_accuracy: 0.2000\n",
"Epoch 71/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7895 - accuracy: 0.4992\n",
"Epoch 00071: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7892 - accuracy: 0.4983 - val_loss: 1.0189 - val_accuracy: 0.4700\n",
"Epoch 72/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7910 - accuracy: 0.4597\n",
"Epoch 00072: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7907 - accuracy: 0.4598 - val_loss: 0.9127 - val_accuracy: 0.4067\n",
"Epoch 73/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7794 - accuracy: 0.5004\n",
"Epoch 00073: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 27s 90ms/step - loss: 0.7780 - accuracy: 0.4996 - val_loss: 0.9719 - val_accuracy: 0.3833\n",
"Epoch 74/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7857 - accuracy: 0.4963\n",
"Epoch 00074: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 29s 97ms/step - loss: 0.7854 - accuracy: 0.4971 - val_loss: 0.8935 - val_accuracy: 0.4167\n",
"Epoch 75/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7817 - accuracy: 0.4788\n",
"Epoch 00075: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 28s 91ms/step - loss: 0.7826 - accuracy: 0.4797 - val_loss: 0.9054 - val_accuracy: 0.4900\n",
"Epoch 76/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7791 - accuracy: 0.4871\n",
"Epoch 00076: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7788 - accuracy: 0.4872 - val_loss: 0.8693 - val_accuracy: 0.4633\n",
"Epoch 77/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7811 - accuracy: 0.4638\n",
"Epoch 00077: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7808 - accuracy: 0.4656 - val_loss: 0.8807 - val_accuracy: 0.0867\n",
"Epoch 78/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7931 - accuracy: 0.4805\n",
"Epoch 00078: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7928 - accuracy: 0.4797 - val_loss: 0.8032 - val_accuracy: 0.2933\n",
"Epoch 79/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7581 - accuracy: 0.4821\n",
"Epoch 00079: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7578 - accuracy: 0.4830 - val_loss: 1.0027 - val_accuracy: 0.4867\n",
"Epoch 80/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7677 - accuracy: 0.4975\n",
"Epoch 00080: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7697 - accuracy: 0.4967 - val_loss: 1.1468 - val_accuracy: 0.4800\n",
"Epoch 81/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7866 - accuracy: 0.5042\n",
"Epoch 00081: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7875 - accuracy: 0.5050 - val_loss: 1.0572 - val_accuracy: 0.0100\n",
"Epoch 82/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7786 - accuracy: 0.5021\n",
"Epoch 00082: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7783 - accuracy: 0.5037 - val_loss: 0.9846 - val_accuracy: 0.4500\n",
"Epoch 83/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7726 - accuracy: 0.5137\n",
"Epoch 00083: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7712 - accuracy: 0.5145 - val_loss: 0.8292 - val_accuracy: 0.4867\n",
"Epoch 84/500\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"301/302 [============================>.] - ETA: 0s - loss: 0.7928 - accuracy: 0.5083\n",
"Epoch 00084: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7913 - accuracy: 0.5075 - val_loss: 0.8730 - val_accuracy: 0.1533\n",
"Epoch 85/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7559 - accuracy: 0.4971\n",
"Epoch 00085: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7546 - accuracy: 0.4971 - val_loss: 1.0342 - val_accuracy: 0.1933\n",
"Epoch 86/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7884 - accuracy: 0.4672\n",
"Epoch 00086: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7869 - accuracy: 0.4673 - val_loss: 1.0285 - val_accuracy: 0.0133\n",
"Epoch 87/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7754 - accuracy: 0.4950\n",
"Epoch 00087: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7751 - accuracy: 0.4950 - val_loss: 1.0340 - val_accuracy: 0.0100\n",
"Epoch 88/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7721 - accuracy: 0.4680\n",
"Epoch 00088: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7719 - accuracy: 0.4681 - val_loss: 1.0283 - val_accuracy: 0.0367\n",
"Epoch 89/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7729 - accuracy: 0.4746\n",
"Epoch 00089: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7738 - accuracy: 0.4739 - val_loss: 1.0074 - val_accuracy: 0.1000\n",
"Epoch 90/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7896 - accuracy: 0.4796\n",
"Epoch 00090: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7905 - accuracy: 0.4797 - val_loss: 1.1443 - val_accuracy: 0.3100\n",
"Epoch 91/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7679 - accuracy: 0.4975\n",
"Epoch 00091: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7677 - accuracy: 0.4975 - val_loss: 0.9433 - val_accuracy: 0.5200\n",
"Epoch 92/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7628 - accuracy: 0.4863\n",
"Epoch 00092: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7614 - accuracy: 0.4855 - val_loss: 1.1351 - val_accuracy: 0.4833\n",
"Epoch 93/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7633 - accuracy: 0.4913\n",
"Epoch 00093: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7631 - accuracy: 0.4913 - val_loss: 1.0528 - val_accuracy: 0.4733\n",
"Epoch 94/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7867 - accuracy: 0.5008\n",
"Epoch 00094: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7864 - accuracy: 0.5000 - val_loss: 0.9083 - val_accuracy: 0.4100\n",
"Epoch 95/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.8101 - accuracy: 0.4896\n",
"Epoch 00095: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.8109 - accuracy: 0.4905 - val_loss: 0.8219 - val_accuracy: 0.4567\n",
"Epoch 96/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7670 - accuracy: 0.4597\n",
"Epoch 00096: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7679 - accuracy: 0.4590 - val_loss: 0.8719 - val_accuracy: 0.5033\n",
"Epoch 97/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7717 - accuracy: 0.4697\n",
"Epoch 00097: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 88ms/step - loss: 0.7703 - accuracy: 0.4706 - val_loss: 0.8803 - val_accuracy: 0.5067\n",
"Epoch 98/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7894 - accuracy: 0.4738\n",
"Epoch 00098: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7880 - accuracy: 0.4731 - val_loss: 0.8777 - val_accuracy: 0.4800\n",
"Epoch 99/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7675 - accuracy: 0.5012\n",
"Epoch 00099: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7685 - accuracy: 0.5012 - val_loss: 0.8756 - val_accuracy: 0.0900\n",
"Epoch 100/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7732 - accuracy: 0.4668\n",
"Epoch 00100: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7730 - accuracy: 0.4669 - val_loss: 0.8951 - val_accuracy: 0.4667\n",
"Epoch 101/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7649 - accuracy: 0.5046\n",
"Epoch 00101: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7658 - accuracy: 0.5037 - val_loss: 1.0292 - val_accuracy: 0.4800\n",
"Epoch 102/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7954 - accuracy: 0.4622\n",
"Epoch 00102: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7962 - accuracy: 0.4615 - val_loss: 0.8690 - val_accuracy: 0.4700\n",
"Epoch 103/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7960 - accuracy: 0.4618\n",
"Epoch 00103: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7957 - accuracy: 0.4611 - val_loss: 1.1179 - val_accuracy: 0.4967\n",
"Epoch 104/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7871 - accuracy: 0.4713\n",
"Epoch 00104: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7891 - accuracy: 0.4722 - val_loss: 0.8508 - val_accuracy: 0.4200\n",
"Epoch 105/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7781 - accuracy: 0.4613\n",
"Epoch 00105: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7790 - accuracy: 0.4606 - val_loss: 0.9431 - val_accuracy: 0.5500\n",
"Epoch 106/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7662 - accuracy: 0.4796\n",
"Epoch 00106: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7659 - accuracy: 0.4797 - val_loss: 0.8411 - val_accuracy: 0.4933\n",
"Epoch 107/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7742 - accuracy: 0.4734\n",
"Epoch 00107: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7739 - accuracy: 0.4727 - val_loss: 0.7741 - val_accuracy: 0.4967\n",
"Epoch 108/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7817 - accuracy: 0.4871\n",
"Epoch 00108: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7814 - accuracy: 0.4880 - val_loss: 0.9330 - val_accuracy: 0.0933\n",
"Epoch 109/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7806 - accuracy: 0.4904\n",
"Epoch 00109: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7815 - accuracy: 0.4905 - val_loss: 0.8704 - val_accuracy: 0.5000\n",
"Epoch 110/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7719 - accuracy: 0.4846\n",
"Epoch 00110: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7716 - accuracy: 0.4855 - val_loss: 0.9052 - val_accuracy: 0.6067\n",
"Epoch 111/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7889 - accuracy: 0.5075\n",
"Epoch 00111: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7863 - accuracy: 0.5075 - val_loss: 0.9542 - val_accuracy: 0.5067\n",
"Epoch 112/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.8062 - accuracy: 0.4817\n",
"Epoch 00112: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.8058 - accuracy: 0.4810 - val_loss: 0.8221 - val_accuracy: 0.4500\n",
"Epoch 113/500\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"301/302 [============================>.] - ETA: 0s - loss: 0.7936 - accuracy: 0.4846\n",
"Epoch 00113: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 82ms/step - loss: 0.7947 - accuracy: 0.4855 - val_loss: 0.8096 - val_accuracy: 0.7667\n",
"Epoch 114/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7775 - accuracy: 0.4938\n",
"Epoch 00114: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 82ms/step - loss: 0.7795 - accuracy: 0.4938 - val_loss: 0.8010 - val_accuracy: 0.2600\n",
"Epoch 115/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7889 - accuracy: 0.5050\n",
"Epoch 00115: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 82ms/step - loss: 0.7887 - accuracy: 0.5050 - val_loss: 0.8136 - val_accuracy: 0.2433\n",
"Epoch 116/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7855 - accuracy: 0.5229\n",
"Epoch 00116: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 82ms/step - loss: 0.7864 - accuracy: 0.5236 - val_loss: 0.8274 - val_accuracy: 0.3833\n",
"Epoch 117/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7886 - accuracy: 0.5004\n",
"Epoch 00117: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7859 - accuracy: 0.4996 - val_loss: 0.8078 - val_accuracy: 0.0233\n",
"Epoch 118/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7830 - accuracy: 0.5129\n",
"Epoch 00118: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7827 - accuracy: 0.5128 - val_loss: 0.8005 - val_accuracy: 0.0200\n",
"Epoch 119/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7678 - accuracy: 0.5337\n",
"Epoch 00119: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7687 - accuracy: 0.5336 - val_loss: 0.7990 - val_accuracy: 0.4200\n",
"Epoch 120/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7775 - accuracy: 0.5345\n",
"Epoch 00120: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 84ms/step - loss: 0.7749 - accuracy: 0.5344 - val_loss: 0.7994 - val_accuracy: 0.2433\n",
"Epoch 121/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7932 - accuracy: 0.5162\n",
"Epoch 00121: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 30s 98ms/step - loss: 0.7929 - accuracy: 0.5162 - val_loss: 0.7749 - val_accuracy: 0.1167\n",
"Epoch 122/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7756 - accuracy: 0.5478\n",
"Epoch 00122: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 29s 96ms/step - loss: 0.7742 - accuracy: 0.5476 - val_loss: 0.9855 - val_accuracy: 0.0200\n",
"Epoch 123/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7620 - accuracy: 0.5636\n",
"Epoch 00123: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7617 - accuracy: 0.5634 - val_loss: 1.0608 - val_accuracy: 0.0200\n",
"Epoch 124/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7731 - accuracy: 0.5880\n",
"Epoch 00124: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7741 - accuracy: 0.5869 - val_loss: 1.0451 - val_accuracy: 0.4800\n",
"Epoch 125/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7884 - accuracy: 0.5556\n",
"Epoch 00125: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7881 - accuracy: 0.5563 - val_loss: 0.8954 - val_accuracy: 0.0433\n",
"Epoch 126/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7918 - accuracy: 0.5220\n",
"Epoch 00126: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7926 - accuracy: 0.5236 - val_loss: 0.7536 - val_accuracy: 0.9367\n",
"Epoch 127/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7754 - accuracy: 0.5615\n",
"Epoch 00127: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7740 - accuracy: 0.5604 - val_loss: 0.7508 - val_accuracy: 0.5533\n",
"Epoch 128/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7885 - accuracy: 0.5894\n",
"Epoch 00128: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7882 - accuracy: 0.5899 - val_loss: 0.7688 - val_accuracy: 0.7400\n",
"Epoch 129/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7868 - accuracy: 0.5520\n",
"Epoch 00129: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7865 - accuracy: 0.5518 - val_loss: 0.9817 - val_accuracy: 0.7300\n",
"Epoch 130/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7689 - accuracy: 0.5669\n",
"Epoch 00130: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7675 - accuracy: 0.5667 - val_loss: 0.8101 - val_accuracy: 0.8767\n",
"Epoch 131/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7814 - accuracy: 0.5520\n",
"Epoch 00131: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7823 - accuracy: 0.5518 - val_loss: 0.7657 - val_accuracy: 0.3167\n",
"Epoch 132/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7714 - accuracy: 0.5511\n",
"Epoch 00132: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7711 - accuracy: 0.5493 - val_loss: 0.9269 - val_accuracy: 0.1300\n",
"Epoch 133/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7849 - accuracy: 0.5187\n",
"Epoch 00133: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7835 - accuracy: 0.5186 - val_loss: 0.9829 - val_accuracy: 0.2700\n",
"Epoch 134/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7855 - accuracy: 0.5977\n",
"Epoch 00134: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7875 - accuracy: 0.5990 - val_loss: 1.0791 - val_accuracy: 0.3733\n",
"Epoch 135/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7968 - accuracy: 0.5839\n",
"Epoch 00135: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7964 - accuracy: 0.5844 - val_loss: 0.9587 - val_accuracy: 0.3367\n",
"Epoch 136/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7758 - accuracy: 0.6118\n",
"Epoch 00136: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7756 - accuracy: 0.6123 - val_loss: 0.8101 - val_accuracy: 0.2367\n",
"Epoch 137/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7720 - accuracy: 0.5797\n",
"Epoch 00137: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7718 - accuracy: 0.5803 - val_loss: 0.7720 - val_accuracy: 0.3133\n",
"Epoch 138/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7927 - accuracy: 0.5594\n",
"Epoch 00138: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7924 - accuracy: 0.5592 - val_loss: 0.7593 - val_accuracy: 0.3967\n",
"Epoch 139/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7742 - accuracy: 0.5719\n",
"Epoch 00139: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7751 - accuracy: 0.5725 - val_loss: 0.7840 - val_accuracy: 0.4600\n",
"Epoch 140/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7903 - accuracy: 0.6027\n",
"Epoch 00140: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7900 - accuracy: 0.6040 - val_loss: 0.7698 - val_accuracy: 0.8833\n",
"Epoch 141/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7662 - accuracy: 0.5686\n",
"Epoch 00141: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7671 - accuracy: 0.5684 - val_loss: 0.8015 - val_accuracy: 0.5467\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 142/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7810 - accuracy: 0.5960\n",
"Epoch 00142: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7796 - accuracy: 0.5965 - val_loss: 0.8038 - val_accuracy: 0.8600\n",
"Epoch 143/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7846 - accuracy: 0.5553\n",
"Epoch 00143: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 82ms/step - loss: 0.7843 - accuracy: 0.5543 - val_loss: 0.7557 - val_accuracy: 0.4967\n",
"Epoch 144/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7662 - accuracy: 0.5482\n",
"Epoch 00144: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 85ms/step - loss: 0.7637 - accuracy: 0.5472 - val_loss: 0.7528 - val_accuracy: 0.4433\n",
"Epoch 145/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7976 - accuracy: 0.5121\n",
"Epoch 00145: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7973 - accuracy: 0.5120 - val_loss: 0.7859 - val_accuracy: 0.4200\n",
"Epoch 146/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7756 - accuracy: 0.5520\n",
"Epoch 00146: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7754 - accuracy: 0.5518 - val_loss: 0.7833 - val_accuracy: 0.4900\n",
"Epoch 147/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7736 - accuracy: 0.5611\n",
"Epoch 00147: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7710 - accuracy: 0.5609 - val_loss: 0.8011 - val_accuracy: 0.4433\n",
"Epoch 148/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7720 - accuracy: 0.5374\n",
"Epoch 00148: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7718 - accuracy: 0.5373 - val_loss: 0.7569 - val_accuracy: 0.1833\n",
"Epoch 149/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7689 - accuracy: 0.5536\n",
"Epoch 00149: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7710 - accuracy: 0.5543 - val_loss: 0.8809 - val_accuracy: 0.4067\n",
"Epoch 150/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7789 - accuracy: 0.5291\n",
"Epoch 00150: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7809 - accuracy: 0.5281 - val_loss: 0.7789 - val_accuracy: 0.3700\n",
"Epoch 151/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7729 - accuracy: 0.5689\n",
"Epoch 00151: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7727 - accuracy: 0.5687 - val_loss: 0.9300 - val_accuracy: 0.3067\n",
"Epoch 152/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7759 - accuracy: 0.5594\n",
"Epoch 00152: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7744 - accuracy: 0.5576 - val_loss: 0.7973 - val_accuracy: 0.2600\n",
"Epoch 153/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7659 - accuracy: 0.5631\n",
"Epoch 00153: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7656 - accuracy: 0.5637 - val_loss: 0.7591 - val_accuracy: 0.0767\n",
"Epoch 154/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7662 - accuracy: 0.5432\n",
"Epoch 00154: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7648 - accuracy: 0.5422 - val_loss: 0.8611 - val_accuracy: 0.4800\n",
"Epoch 155/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7706 - accuracy: 0.5025\n",
"Epoch 00155: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7715 - accuracy: 0.5025 - val_loss: 0.7661 - val_accuracy: 0.6867\n",
"Epoch 156/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7652 - accuracy: 0.5100\n",
"Epoch 00156: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 27s 89ms/step - loss: 0.7649 - accuracy: 0.5091 - val_loss: 0.7585 - val_accuracy: 0.4467\n",
"Epoch 157/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7882 - accuracy: 0.5420\n",
"Epoch 00157: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7890 - accuracy: 0.5427 - val_loss: 0.7546 - val_accuracy: 0.3600\n",
"Epoch 158/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7577 - accuracy: 0.5503\n",
"Epoch 00158: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7575 - accuracy: 0.5485 - val_loss: 0.7602 - val_accuracy: 0.1867\n",
"Epoch 159/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7823 - accuracy: 0.5320\n",
"Epoch 00159: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7820 - accuracy: 0.5319 - val_loss: 0.7984 - val_accuracy: 0.7500\n",
"Epoch 160/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7747 - accuracy: 0.5278\n",
"Epoch 00160: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7756 - accuracy: 0.5286 - val_loss: 0.8000 - val_accuracy: 0.6167\n",
"Epoch 161/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7843 - accuracy: 0.5303\n",
"Epoch 00161: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7840 - accuracy: 0.5302 - val_loss: 0.7663 - val_accuracy: 0.8100\n",
"Epoch 162/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7680 - accuracy: 0.5287\n",
"Epoch 00162: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7690 - accuracy: 0.5286 - val_loss: 0.7612 - val_accuracy: 0.3600\n",
"Epoch 163/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7819 - accuracy: 0.5503\n",
"Epoch 00163: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7793 - accuracy: 0.5485 - val_loss: 0.7651 - val_accuracy: 0.5833\n",
"Epoch 164/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7785 - accuracy: 0.5320\n",
"Epoch 00164: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7770 - accuracy: 0.5319 - val_loss: 0.7993 - val_accuracy: 0.5500\n",
"Epoch 165/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7879 - accuracy: 0.4838\n",
"Epoch 00165: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 84ms/step - loss: 0.7876 - accuracy: 0.4838 - val_loss: 0.8325 - val_accuracy: 0.6333\n",
"Epoch 166/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7745 - accuracy: 0.4963\n",
"Epoch 00166: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7720 - accuracy: 0.4971 - val_loss: 0.7742 - val_accuracy: 0.7900\n",
"Epoch 167/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7871 - accuracy: 0.5112\n",
"Epoch 00167: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7857 - accuracy: 0.5112 - val_loss: 0.7582 - val_accuracy: 0.6700\n",
"Epoch 168/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7761 - accuracy: 0.4979\n",
"Epoch 00168: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 29s 95ms/step - loss: 0.7747 - accuracy: 0.4971 - val_loss: 0.7734 - val_accuracy: 0.8533\n",
"Epoch 169/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7809 - accuracy: 0.4950\n",
"Epoch 00169: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 29s 97ms/step - loss: 0.7817 - accuracy: 0.4959 - val_loss: 0.7811 - val_accuracy: 0.7933\n",
"Epoch 170/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7900 - accuracy: 0.5046\n",
"Epoch 00170: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 87ms/step - loss: 0.7920 - accuracy: 0.5046 - val_loss: 0.7734 - val_accuracy: 0.4167\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 171/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7774 - accuracy: 0.5054\n",
"Epoch 00171: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 26s 86ms/step - loss: 0.7771 - accuracy: 0.5062 - val_loss: 0.7739 - val_accuracy: 0.9467\n",
"Epoch 172/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7685 - accuracy: 0.4676\n",
"Epoch 00172: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7683 - accuracy: 0.4685 - val_loss: 0.7521 - val_accuracy: 0.9033\n",
"Epoch 173/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7918 - accuracy: 0.4913\n",
"Epoch 00173: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7938 - accuracy: 0.4930 - val_loss: 0.7620 - val_accuracy: 0.7667\n",
"Epoch 174/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7850 - accuracy: 0.4755\n",
"Epoch 00174: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7860 - accuracy: 0.4756 - val_loss: 0.9701 - val_accuracy: 0.4767\n",
"Epoch 175/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7865 - accuracy: 0.5012\n",
"Epoch 00175: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7850 - accuracy: 0.5004 - val_loss: 1.0353 - val_accuracy: 0.4733\n",
"Epoch 176/500\n",
"301/302 [============================>.] - ETA: 0s - loss: 0.7946 - accuracy: 0.4846\n",
"Epoch 00176: val_loss did not improve from 0.74411\n",
"302/302 [==============================] - 25s 83ms/step - loss: 0.7943 - accuracy: 0.4838 - val_loss: 0.7922 - val_accuracy: 0.4600\n"
]
}
],
"source": [
"\n",
"# Read Data and Augment it: Make sure to select augmentations that are appropriate to your images.\n",
"# To save augmentations un-comment save lines and add to your flow parameters.\n",
"train_datagen = ImageDataGenerator(rescale=1. / 255,\n",
" rotation_range=rotation_ratio,\n",
" width_shift_range=width_shift_range,\n",
" height_shift_range=height_shift_range,\n",
" shear_range=shear_range,\n",
" zoom_range=zoom_range,\n",
" horizontal_flip=True,\n",
" vertical_flip=True,\n",
" fill_mode='nearest',\n",
" validation_split=0.2)\n",
"\n",
"\n",
"\n",
"train_generator = train_datagen.flow_from_directory(train_data_dir,\n",
" target_size=(img_width, img_height),\n",
" batch_size=batch_size,\n",
" shuffle=True,\n",
" color_mode= \"grayscale\",\n",
" class_mode='binary',\n",
" subset='training') # set as training data\n",
"\n",
"validation_generator = train_datagen.flow_from_directory(train_data_dir, # same directory as training data\n",
" target_size=(img_width, img_height),\n",
" batch_size=batch_size,\n",
" shuffle=True,\n",
" color_mode=\"grayscale\",\n",
" class_mode='binary',\n",
" subset='validation') # set as validation data\n",
"\n",
"#opt = tf.keras.optimizers.SGD(learning_rate=learn_rate, momentum = momentum)\n",
"opt = tf.keras.optimizers.Adam(learning_rate=learn_rate)\n",
"model.compile(optimizer= opt,\n",
" loss='categorical_crossentropy', # categorical_crossentropy if multi-class classifier\n",
" metrics=['accuracy'])\n",
"\n",
"# save weights of best training epoch: monitor either val_loss or val_acc\n",
"\n",
"top_weights_path = os.path.join(os.path.abspath(model_path), 'top_model_weights.h5')\n",
"callbacks_list = [\n",
" ModelCheckpoint(top_weights_path, monitor='val_loss', verbose=1, save_best_only=True),\n",
" EarlyStopping(monitor='val_loss', patience= patience, verbose=0)\n",
"]\n",
"\n",
"# Train Simple CNN\n",
"hist = model.fit(train_generator,\n",
" steps_per_epoch= int(train_generator.samples/batch_size*.8),\n",
" epochs=nb_epoch,\n",
" validation_data=validation_generator,\n",
" validation_steps = int(validation_generator.samples /batch_size*.8),\n",
" validation_freq=1,\n",
" callbacks=callbacks_list)\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"\n",
"model_json = model.to_json()\n",
"with open(os.path.join(os.path.abspath(model_path), 'model.json'), 'w') as json_file:\n",
" json_file.write(model_json)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"WARNING:tensorflow:From <ipython-input-6-94ce6e05c1eb>:10: Model.predict_generator (from tensorflow.python.keras.engine.training) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Please use Model.predict, which supports generators.\n",
"Confusion Matrix\n",
"[[ 86 82]\n",
" [ 88 121]]\n",
"Classification Report\n",
" precision recall f1-score support\n",
"\n",
" 0-Normal 0.49 0.51 0.50 168\n",
" 1-Falla 0.60 0.58 0.59 209\n",
"\n",
" accuracy 0.55 377\n",
" macro avg 0.55 0.55 0.55 377\n",
"weighted avg 0.55 0.55 0.55 377\n",
"\n"
]
}
],
"source": [
"from sklearn.metrics import classification_report, confusion_matrix\n",
"\n",
"json_file = open(os.path.join(os.path.abspath(model_path), 'model.json'), 'r')\n",
"loaded_model_json = json_file.read()\n",
"json_file.close()\n",
"loaded_model = tf.keras.models.model_from_json(loaded_model_json)\n",
"\n",
"\n",
"#Confution Matrix and Classification Report\n",
"Y_pred = model.predict_generator(validation_generator, validation_generator.samples // batch_size+1)\n",
"y_pred = np.argmax(Y_pred, axis=1)\n",
"print('Confusion Matrix')\n",
"print(confusion_matrix(validation_generator.classes, y_pred))\n",
"print('Classification Report')\n",
"target_names = train_generator.class_indices.keys()\n",
"print(classification_report(validation_generator.classes, y_pred, target_names=target_names))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"num_classes = 10\n",
"input_shape = (32, 32, 1)\n",
"\n",
"\"\"\"\n",
"base_model = tf.keras.applications.MobileNetV2(input_shape=input_shape, weights=None, include_top=False)\n",
"\n",
"# Top Model Block\n",
"x = base_model.output\n",
"x = GlobalAveragePooling2D()(x)\n",
"predictions = Dense(10, activation='softmax')(x)\n",
"\n",
"# add your top layer block to your base model\n",
"model = tf.keras.Model(base_model.input, predictions)\n",
"\"\"\"\n",
"\n",
"model = tf.keras.Sequential(\n",
" [\n",
" tf.keras.Input(shape=input_shape),\n",
" tf.keras.layers.Conv2D(32, kernel_size=(3, 3), activation=\"relu\"),\n",
" tf.keras.layers.MaxPooling2D(pool_size=(2, 2)),\n",
" tf.keras.layers.Conv2D(64, kernel_size=(3, 3), activation=\"relu\"),\n",
" tf.keras.layers.MaxPooling2D(pool_size=(2, 2)),\n",
" tf.keras.layers.Flatten(),\n",
" tf.keras.layers.Dropout(0.5),\n",
" tf.keras.layers.Dense(num_classes, activation=\"softmax\"),\n",
" ]\n",
")\n",
"\n",
"model.summary()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"\n",
"# the data, split between train and test sets\n",
"(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()\n",
"\n",
"# Scale images to the [0, 1] range\n",
"x_train = x_train.astype(\"float32\") / 255\n",
"x_test = x_test.astype(\"float32\") / 255\n",
"# Make sure images have shape (28, 28, 1)\n",
"x_train = np.expand_dims(x_train, -1)\n",
"x_test = np.expand_dims(x_test, -1)\n",
"x_train = tf.image.resize(x_train, [input_shape[1], input_shape[0]])\n",
"x_test = tf.image.resize(x_test, [input_shape[1], input_shape[0]])\n",
"\n",
"print(\"x_train shape:\", x_train.shape)\n",
"print(x_train.shape[0], \"train samples\")\n",
"print(x_test.shape[0], \"test samples\")\n",
"\n",
"\n",
"# convert class vectors to binary class matrices\n",
"y_train = tf.keras.utils.to_categorical(y_train, num_classes)\n",
"y_test = tf.keras.utils.to_categorical(y_test, num_classes)\n",
"\n",
"\n",
"batch_size = 128\n",
"epochs = 5\n",
"\n",
"model.compile(loss=\"categorical_crossentropy\", optimizer=\"adam\", metrics=[\"accuracy\"])\n",
"\n",
"model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs, validation_split=0.1)\n",
"\n",
"score = model.evaluate(x_test, y_test, verbose=0)\n",
"print(\"Test loss:\", score[0])\n",
"print(\"Test accuracy:\", score[1])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"A = train_generator.next()\n",
"Im = A[0][0][:,:,0]\n",
"label = A[1][0]\n",
"plt.imshow(cv2.resize(Im,(64,128)), cmap = 'gray')\n",
"plt.title(label)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.7"
}
},
"nbformat": 4,
"nbformat_minor": 4
}