Files
Photovoltaic_Fault_Detector/Panel_Detector.ipynb
Daniel Saavedra e91f29cd2b config Diode Fault
2020-02-19 14:26:55 -03:00

1303 lines
119 KiB
Plaintext

{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Detector de Celulas"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Cargar el modelo ssd7 \n",
"(https://github.com/pierluigiferrari/ssd_keras#how-to-fine-tune-one-of-the-trained-models-on-your-own-dataset)\n",
"\n",
"Training del SSD7 (modelo reducido de SSD). Parámetros en config_7.json y descargar VGG_ILSVRC_16_layers_fc_reduced.h5\n",
"\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"Training on: \t{'1': 1}\n",
"\n",
"OK create model\n",
"\n",
"Loading pretrained weights VGG.\n",
"\n",
"__________________________________________________________________________________________________\n",
"Layer (type) Output Shape Param # Connected to \n",
"==================================================================================================\n",
"input_1 (InputLayer) (None, 400, 400, 3) 0 \n",
"__________________________________________________________________________________________________\n",
"identity_layer (Lambda) (None, 400, 400, 3) 0 input_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"input_mean_normalization (Lambd (None, 400, 400, 3) 0 identity_layer[0][0] \n",
"__________________________________________________________________________________________________\n",
"input_channel_swap (Lambda) (None, 400, 400, 3) 0 input_mean_normalization[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv1_1 (Conv2D) (None, 400, 400, 64) 1792 input_channel_swap[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv1_2 (Conv2D) (None, 400, 400, 64) 36928 conv1_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"pool1 (MaxPooling2D) (None, 200, 200, 64) 0 conv1_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2_1 (Conv2D) (None, 200, 200, 128 73856 pool1[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2_2 (Conv2D) (None, 200, 200, 128 147584 conv2_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"pool2 (MaxPooling2D) (None, 100, 100, 128 0 conv2_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv3_1 (Conv2D) (None, 100, 100, 256 295168 pool2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv3_2 (Conv2D) (None, 100, 100, 256 590080 conv3_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv3_3 (Conv2D) (None, 100, 100, 256 590080 conv3_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"pool3 (MaxPooling2D) (None, 50, 50, 256) 0 conv3_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv4_1 (Conv2D) (None, 50, 50, 512) 1180160 pool3[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv4_2 (Conv2D) (None, 50, 50, 512) 2359808 conv4_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv4_3 (Conv2D) (None, 50, 50, 512) 2359808 conv4_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"pool4 (MaxPooling2D) (None, 25, 25, 512) 0 conv4_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv5_1 (Conv2D) (None, 25, 25, 512) 2359808 pool4[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv5_2 (Conv2D) (None, 25, 25, 512) 2359808 conv5_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv5_3 (Conv2D) (None, 25, 25, 512) 2359808 conv5_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"pool5 (MaxPooling2D) (None, 25, 25, 512) 0 conv5_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"fc6 (Conv2D) (None, 25, 25, 1024) 4719616 pool5[0][0] \n",
"__________________________________________________________________________________________________\n",
"fc7 (Conv2D) (None, 25, 25, 1024) 1049600 fc6[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv6_1 (Conv2D) (None, 25, 25, 256) 262400 fc7[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv6_padding (ZeroPadding2D) (None, 27, 27, 256) 0 conv6_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv6_2 (Conv2D) (None, 13, 13, 512) 1180160 conv6_padding[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv7_1 (Conv2D) (None, 13, 13, 128) 65664 conv6_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv7_padding (ZeroPadding2D) (None, 15, 15, 128) 0 conv7_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv7_2 (Conv2D) (None, 7, 7, 256) 295168 conv7_padding[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv8_1 (Conv2D) (None, 7, 7, 128) 32896 conv7_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv8_2 (Conv2D) (None, 5, 5, 256) 295168 conv8_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv9_1 (Conv2D) (None, 5, 5, 128) 32896 conv8_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv4_3_norm (L2Normalization) (None, 50, 50, 512) 512 conv4_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv9_2 (Conv2D) (None, 3, 3, 256) 295168 conv9_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv4_3_norm_mbox_conf (Conv2D) (None, 50, 50, 8) 36872 conv4_3_norm[0][0] \n",
"__________________________________________________________________________________________________\n",
"fc7_mbox_conf (Conv2D) (None, 25, 25, 12) 110604 fc7[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv6_2_mbox_conf (Conv2D) (None, 13, 13, 12) 55308 conv6_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv7_2_mbox_conf (Conv2D) (None, 7, 7, 12) 27660 conv7_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv8_2_mbox_conf (Conv2D) (None, 5, 5, 8) 18440 conv8_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv9_2_mbox_conf (Conv2D) (None, 3, 3, 8) 18440 conv9_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv4_3_norm_mbox_loc (Conv2D) (None, 50, 50, 16) 73744 conv4_3_norm[0][0] \n",
"__________________________________________________________________________________________________\n",
"fc7_mbox_loc (Conv2D) (None, 25, 25, 24) 221208 fc7[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv6_2_mbox_loc (Conv2D) (None, 13, 13, 24) 110616 conv6_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv7_2_mbox_loc (Conv2D) (None, 7, 7, 24) 55320 conv7_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv8_2_mbox_loc (Conv2D) (None, 5, 5, 16) 36880 conv8_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv9_2_mbox_loc (Conv2D) (None, 3, 3, 16) 36880 conv9_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv4_3_norm_mbox_conf_reshape (None, 10000, 2) 0 conv4_3_norm_mbox_conf[0][0] \n",
"__________________________________________________________________________________________________\n",
"fc7_mbox_conf_reshape (Reshape) (None, 3750, 2) 0 fc7_mbox_conf[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv6_2_mbox_conf_reshape (Resh (None, 1014, 2) 0 conv6_2_mbox_conf[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv7_2_mbox_conf_reshape (Resh (None, 294, 2) 0 conv7_2_mbox_conf[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv8_2_mbox_conf_reshape (Resh (None, 100, 2) 0 conv8_2_mbox_conf[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv9_2_mbox_conf_reshape (Resh (None, 36, 2) 0 conv9_2_mbox_conf[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv4_3_norm_mbox_priorbox (Anc (None, 50, 50, 4, 8) 0 conv4_3_norm_mbox_loc[0][0] \n",
"__________________________________________________________________________________________________\n",
"fc7_mbox_priorbox (AnchorBoxes) (None, 25, 25, 6, 8) 0 fc7_mbox_loc[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv6_2_mbox_priorbox (AnchorBo (None, 13, 13, 6, 8) 0 conv6_2_mbox_loc[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv7_2_mbox_priorbox (AnchorBo (None, 7, 7, 6, 8) 0 conv7_2_mbox_loc[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv8_2_mbox_priorbox (AnchorBo (None, 5, 5, 4, 8) 0 conv8_2_mbox_loc[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv9_2_mbox_priorbox (AnchorBo (None, 3, 3, 4, 8) 0 conv9_2_mbox_loc[0][0] \n",
"__________________________________________________________________________________________________\n",
"mbox_conf (Concatenate) (None, 15194, 2) 0 conv4_3_norm_mbox_conf_reshape[0]\n",
" fc7_mbox_conf_reshape[0][0] \n",
" conv6_2_mbox_conf_reshape[0][0] \n",
" conv7_2_mbox_conf_reshape[0][0] \n",
" conv8_2_mbox_conf_reshape[0][0] \n",
" conv9_2_mbox_conf_reshape[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv4_3_norm_mbox_loc_reshape ( (None, 10000, 4) 0 conv4_3_norm_mbox_loc[0][0] \n",
"__________________________________________________________________________________________________\n",
"fc7_mbox_loc_reshape (Reshape) (None, 3750, 4) 0 fc7_mbox_loc[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv6_2_mbox_loc_reshape (Resha (None, 1014, 4) 0 conv6_2_mbox_loc[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv7_2_mbox_loc_reshape (Resha (None, 294, 4) 0 conv7_2_mbox_loc[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv8_2_mbox_loc_reshape (Resha (None, 100, 4) 0 conv8_2_mbox_loc[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv9_2_mbox_loc_reshape (Resha (None, 36, 4) 0 conv9_2_mbox_loc[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv4_3_norm_mbox_priorbox_resh (None, 10000, 8) 0 conv4_3_norm_mbox_priorbox[0][0] \n",
"__________________________________________________________________________________________________\n",
"fc7_mbox_priorbox_reshape (Resh (None, 3750, 8) 0 fc7_mbox_priorbox[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv6_2_mbox_priorbox_reshape ( (None, 1014, 8) 0 conv6_2_mbox_priorbox[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv7_2_mbox_priorbox_reshape ( (None, 294, 8) 0 conv7_2_mbox_priorbox[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv8_2_mbox_priorbox_reshape ( (None, 100, 8) 0 conv8_2_mbox_priorbox[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv9_2_mbox_priorbox_reshape ( (None, 36, 8) 0 conv9_2_mbox_priorbox[0][0] \n",
"__________________________________________________________________________________________________\n",
"mbox_conf_softmax (Activation) (None, 15194, 2) 0 mbox_conf[0][0] \n",
"__________________________________________________________________________________________________\n",
"mbox_loc (Concatenate) (None, 15194, 4) 0 conv4_3_norm_mbox_loc_reshape[0][\n",
" fc7_mbox_loc_reshape[0][0] \n",
" conv6_2_mbox_loc_reshape[0][0] \n",
" conv7_2_mbox_loc_reshape[0][0] \n",
" conv8_2_mbox_loc_reshape[0][0] \n",
" conv9_2_mbox_loc_reshape[0][0] \n",
"__________________________________________________________________________________________________\n",
"mbox_priorbox (Concatenate) (None, 15194, 8) 0 conv4_3_norm_mbox_priorbox_reshap\n",
" fc7_mbox_priorbox_reshape[0][0] \n",
" conv6_2_mbox_priorbox_reshape[0][\n",
" conv7_2_mbox_priorbox_reshape[0][\n",
" conv8_2_mbox_priorbox_reshape[0][\n",
" conv9_2_mbox_priorbox_reshape[0][\n",
"__________________________________________________________________________________________________\n",
"predictions (Concatenate) (None, 15194, 14) 0 mbox_conf_softmax[0][0] \n",
" mbox_loc[0][0] \n",
" mbox_priorbox[0][0] \n",
"==================================================================================================\n",
"Total params: 23,745,908\n",
"Trainable params: 23,745,908\n",
"Non-trainable params: 0\n",
"__________________________________________________________________________________________________\n"
]
}
],
"source": [
"from keras.optimizers import Adam, SGD\n",
"from keras.callbacks import ModelCheckpoint, LearningRateScheduler, TerminateOnNaN, CSVLogger\n",
"from keras import backend as K\n",
"from keras.models import load_model\n",
"from math import ceil\n",
"import numpy as np\n",
"from matplotlib import pyplot as plt\n",
"import os\n",
"import json\n",
"import xml.etree.cElementTree as ET\n",
"\n",
"import sys\n",
"sys.path += [os.path.abspath('ssd_keras-master')]\n",
"\n",
"from keras_loss_function.keras_ssd_loss import SSDLoss\n",
"from keras_layers.keras_layer_AnchorBoxes import AnchorBoxes\n",
"from keras_layers.keras_layer_DecodeDetections import DecodeDetections\n",
"from keras_layers.keras_layer_DecodeDetectionsFast import DecodeDetectionsFast\n",
"from keras_layers.keras_layer_L2Normalization import L2Normalization\n",
"from ssd_encoder_decoder.ssd_input_encoder import SSDInputEncoder\n",
"from ssd_encoder_decoder.ssd_output_decoder import decode_detections, decode_detections_fast\n",
"from data_generator.object_detection_2d_data_generator import DataGenerator\n",
"from data_generator.object_detection_2d_geometric_ops import Resize\n",
"from data_generator.object_detection_2d_photometric_ops import ConvertTo3Channels\n",
"from data_generator.data_augmentation_chain_original_ssd import SSDDataAugmentation\n",
"from data_generator.object_detection_2d_misc_utils import apply_inverse_transforms\n",
"from eval_utils.average_precision_evaluator import Evaluator\n",
"from data_generator.data_augmentation_chain_variable_input_size import DataAugmentationVariableInputSize\n",
"from data_generator.data_augmentation_chain_constant_input_size import DataAugmentationConstantInputSize\n",
"\n",
"\n",
"def makedirs(path):\n",
" try:\n",
" os.makedirs(path)\n",
" except OSError:\n",
" if not os.path.isdir(path):\n",
" raise\n",
"\n",
"\n",
"\n",
"\n",
"\n",
"K.tensorflow_backend._get_available_gpus()\n",
"\n",
"\n",
"def lr_schedule(epoch):\n",
" if epoch < 80:\n",
" return 0.001\n",
" elif epoch < 100:\n",
" return 0.0001\n",
" else:\n",
" return 0.00001\n",
"\n",
"config_path = 'config_300_fault_1.json'\n",
"\n",
"\n",
"with open(config_path) as config_buffer:\n",
" config = json.loads(config_buffer.read())\n",
"\n",
"###############################\n",
"# Parse the annotations\n",
"###############################\n",
"path_imgs_training = config['train']['train_image_folder']\n",
"path_anns_training = config['train']['train_annot_folder']\n",
"path_imgs_val = config['test']['test_image_folder']\n",
"path_anns_val = config['test']['test_annot_folder']\n",
"labels = config['model']['labels']\n",
"categories = {}\n",
"#categories = {\"Razor\": 1, \"Gun\": 2, \"Knife\": 3, \"Shuriken\": 4} #la categoría 0 es la background\n",
"for i in range(len(labels)): categories[labels[i]] = i+1\n",
"print('\\nTraining on: \\t' + str(categories) + '\\n')\n",
"\n",
"####################################\n",
"# Parameters\n",
"###################################\n",
" #%%\n",
"img_height = config['model']['input'] # Height of the model input images\n",
"img_width = config['model']['input'] # Width of the model input images\n",
"img_channels = 3 # Number of color channels of the model input images\n",
"mean_color = [123, 117, 104] # The per-channel mean of the images in the dataset. Do not change this value if you're using any of the pre-trained weights.\n",
"swap_channels = [2, 1, 0] # The color channel order in the original SSD is BGR, so we'll have the model reverse the color channel order of the input images.\n",
"n_classes = len(labels) # Number of positive classes, e.g. 20 for Pascal VOC, 80 for MS COCO\n",
"scales_pascal = [0.1, 0.2, 0.37, 0.54, 0.71, 0.88, 1.05] # The anchor box scaling factors used in the original SSD300 for the Pascal VOC datasets\n",
"#scales_coco = [0.07, 0.15, 0.33, 0.51, 0.69, 0.87, 1.05] # The anchor box scaling factors used in the original SSD300 for the MS COCO datasets\n",
"scales = scales_pascal\n",
"aspect_ratios = [[1.0, 2.0, 0.5],\n",
" [1.0, 2.0, 0.5, 3.0, 1.0/3.0],\n",
" [1.0, 2.0, 0.5, 3.0, 1.0/3.0],\n",
" [1.0, 2.0, 0.5, 3.0, 1.0/3.0],\n",
" [1.0, 2.0, 0.5],\n",
" [1.0, 2.0, 0.5]] # The anchor box aspect ratios used in the original SSD300; the order matters\n",
"two_boxes_for_ar1 = True\n",
"steps = [8, 16, 32, 64, 100, 300] # The space between two adjacent anchor box center points for each predictor layer.\n",
"offsets = [0.5, 0.5, 0.5, 0.5, 0.5, 0.5] # The offsets of the first anchor box center points from the top and left borders of the image as a fraction of the step size for each predictor layer.\n",
"clip_boxes = False # Whether or not to clip the anchor boxes to lie entirely within the image boundaries\n",
"variances = [0.1, 0.1, 0.2, 0.2] # The variances by which the encoded target coordinates are divided as in the original implementation\n",
"normalize_coords = True\n",
"\n",
"K.clear_session() # Clear previous models from memory.\n",
"\n",
"\n",
"model_path = config['train']['saved_weights_name']\n",
"# 3: Instantiate an optimizer and the SSD loss function and compile the model.\n",
"# If you want to follow the original Caffe implementation, use the preset SGD\n",
"# optimizer, otherwise I'd recommend the commented-out Adam optimizer.\n",
"\n",
"\n",
"if config['model']['backend'] == 'ssd7':\n",
" #weights_path = 'VGG_ILSVRC_16_layers_fc_reduced.h5'\n",
" scales = [0.08, 0.16, 0.32, 0.64, 0.96] # An explicit list of anchor box scaling factors. If this is passed, it will override `min_scale` and `max_scale`.\n",
" aspect_ratios = [0.5 ,1.0, 2.0] # The list of aspect ratios for the anchor boxes\n",
" two_boxes_for_ar1 = True # Whether or not you want to generate two anchor boxes for aspect ratio 1\n",
" steps = None # In case you'd like to set the step sizes for the anchor box grids manually; not recommended\n",
" offsets = None\n",
"\n",
"if os.path.exists(model_path):\n",
" print(\"\\nLoading pretrained weights.\\n\")\n",
" # We need to create an SSDLoss object in order to pass that to the model loader.\n",
" ssd_loss = SSDLoss(neg_pos_ratio=3, alpha=1.0)\n",
"\n",
" K.clear_session() # Clear previous models from memory.\n",
" model = load_model(model_path, custom_objects={'AnchorBoxes': AnchorBoxes,\n",
" 'L2Normalization': L2Normalization,\n",
" 'compute_loss': ssd_loss.compute_loss})\n",
"\n",
"\n",
"else:\n",
" ####################################\n",
" # Build the Keras model.\n",
" ###################################\n",
"\n",
" if config['model']['backend'] == 'ssd300':\n",
" #weights_path = 'VGG_VOC0712Plus_SSD_300x300_ft_iter_160000.h5'\n",
" from models.keras_ssd300 import ssd_300\n",
"\n",
" model = ssd_300(image_size=(img_height, img_width, img_channels),\n",
" n_classes=n_classes,\n",
" mode='training',\n",
" l2_regularization=0.0005,\n",
" scales=scales,\n",
" aspect_ratios_per_layer=aspect_ratios,\n",
" two_boxes_for_ar1=two_boxes_for_ar1,\n",
" steps=steps,\n",
" offsets=offsets,\n",
" clip_boxes=clip_boxes,\n",
" variances=variances,\n",
" normalize_coords=normalize_coords,\n",
" subtract_mean=mean_color,\n",
" swap_channels=swap_channels)\n",
"\n",
"\n",
" elif config['model']['backend'] == 'ssd7':\n",
" #weights_path = 'VGG_ILSVRC_16_layers_fc_reduced.h5'\n",
" from models.keras_ssd7 import build_model as ssd\n",
" scales = [0.08, 0.16, 0.32, 0.64, 0.96] # An explicit list of anchor box scaling factors. If this is passed, it will override `min_scale` and `max_scale`.\n",
" aspect_ratios = [0.5 ,1.0, 2.0] # The list of aspect ratios for the anchor boxes\n",
" two_boxes_for_ar1 = True # Whether or not you want to generate two anchor boxes for aspect ratio 1\n",
" steps = None # In case you'd like to set the step sizes for the anchor box grids manually; not recommended\n",
" offsets = None\n",
" model = ssd(image_size=(img_height, img_width, img_channels),\n",
" n_classes=n_classes,\n",
" mode='training',\n",
" l2_regularization=0.0005,\n",
" scales=scales,\n",
" aspect_ratios_global=aspect_ratios,\n",
" aspect_ratios_per_layer=None,\n",
" two_boxes_for_ar1=two_boxes_for_ar1,\n",
" steps=steps,\n",
" offsets=offsets,\n",
" clip_boxes=clip_boxes,\n",
" variances=variances,\n",
" normalize_coords=normalize_coords,\n",
" subtract_mean=None,\n",
" divide_by_stddev=None)\n",
"\n",
" else :\n",
" print('Wrong Backend')\n",
"\n",
"\n",
"\n",
" print('OK create model')\n",
" #sgd = SGD(lr=config['train']['learning_rate'], momentum=0.9, decay=0.0, nesterov=False)\n",
"\n",
" # TODO: Set the path to the weights you want to load. only for ssd300 or ssd512\n",
"\n",
" weights_path = '../ssd_keras-master/VGG_ILSVRC_16_layers_fc_reduced.h5'\n",
" print(\"\\nLoading pretrained weights VGG.\\n\")\n",
" model.load_weights(weights_path, by_name=True)\n",
"\n",
" # 3: Instantiate an optimizer and the SSD loss function and compile the model.\n",
" # If you want to follow the original Caffe implementation, use the preset SGD\n",
" # optimizer, otherwise I'd recommend the commented-out Adam optimizer.\n",
"\n",
"\n",
" #adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)\n",
" #sgd = SGD(lr=0.001, momentum=0.9, decay=0.0, nesterov=False)\n",
" optimizer = Adam(lr=config['train']['learning_rate'], beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)\n",
" ssd_loss = SSDLoss(neg_pos_ratio=3, alpha=1.0)\n",
" model.compile(optimizer=optimizer, loss=ssd_loss.compute_loss)\n",
"\n",
" model.summary()\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Instanciar los generadores de datos y entrenamiento del modelo.\n",
"\n",
"*Cambio realizado para leer png y jpg. keras-ssd-master/data_generator/object_detection_2d_data_generator.py función parse_xml\n"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Processing image set 'train.txt': 100%|██████████| 1/1 [00:00<00:00, 18.73it/s]\n",
"Processing image set 'test.txt': 100%|██████████| 1/1 [00:00<00:00, 20.23it/s]\n",
"panel : 69\n",
"Number of images in the training dataset:\t 1\n",
"Number of images in the validation dataset:\t 1\n",
"Epoch 1/100\n",
"\n",
"Epoch 00001: LearningRateScheduler setting learning rate to 0.001.\n",
"27/50 [===============>..............] - ETA: 19s - loss: 10.4861"
]
},
{
"ename": "KeyboardInterrupt",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-5-eddb2cf7cd19>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 201\u001b[0m \u001b[0mvalidation_steps\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mceil\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mval_dataset_size\u001b[0m\u001b[0;34m/\u001b[0m\u001b[0mbatch_size\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 202\u001b[0m \u001b[0minitial_epoch\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0minitial_epoch\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 203\u001b[0;31m verbose = 1 if config['train']['debug'] else 2)\n\u001b[0m\u001b[1;32m 204\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 205\u001b[0m \u001b[0mhistory_path\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mconfig\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'train'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'saved_weights_name'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msplit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'.'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;34m'_history'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/anaconda3/envs/model/lib/python3.6/site-packages/keras/legacy/interfaces.py\u001b[0m in \u001b[0;36mwrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 89\u001b[0m warnings.warn('Update your `' + object_name + '` call to the ' +\n\u001b[1;32m 90\u001b[0m 'Keras 2 API: ' + signature, stacklevel=2)\n\u001b[0;32m---> 91\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mfunc\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 92\u001b[0m \u001b[0mwrapper\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_original_function\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfunc\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 93\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mwrapper\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/anaconda3/envs/model/lib/python3.6/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36mfit_generator\u001b[0;34m(self, generator, steps_per_epoch, epochs, verbose, callbacks, validation_data, validation_steps, class_weight, max_queue_size, workers, use_multiprocessing, shuffle, initial_epoch)\u001b[0m\n\u001b[1;32m 1416\u001b[0m \u001b[0muse_multiprocessing\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0muse_multiprocessing\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1417\u001b[0m \u001b[0mshuffle\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mshuffle\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1418\u001b[0;31m initial_epoch=initial_epoch)\n\u001b[0m\u001b[1;32m 1419\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1420\u001b[0m \u001b[0;34m@\u001b[0m\u001b[0minterfaces\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlegacy_generator_methods_support\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/anaconda3/envs/model/lib/python3.6/site-packages/keras/engine/training_generator.py\u001b[0m in \u001b[0;36mfit_generator\u001b[0;34m(model, generator, steps_per_epoch, epochs, verbose, callbacks, validation_data, validation_steps, class_weight, max_queue_size, workers, use_multiprocessing, shuffle, initial_epoch)\u001b[0m\n\u001b[1;32m 215\u001b[0m outs = model.train_on_batch(x, y,\n\u001b[1;32m 216\u001b[0m \u001b[0msample_weight\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0msample_weight\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 217\u001b[0;31m class_weight=class_weight)\n\u001b[0m\u001b[1;32m 218\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 219\u001b[0m \u001b[0mouts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mto_list\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mouts\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/anaconda3/envs/model/lib/python3.6/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36mtrain_on_batch\u001b[0;34m(self, x, y, sample_weight, class_weight)\u001b[0m\n\u001b[1;32m 1215\u001b[0m \u001b[0mins\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0my\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0msample_weights\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1216\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_make_train_function\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1217\u001b[0;31m \u001b[0moutputs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain_function\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mins\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1218\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0munpack_singleton\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0moutputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1219\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/anaconda3/envs/model/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, inputs)\u001b[0m\n\u001b[1;32m 2713\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_legacy_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2714\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2715\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2716\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2717\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mpy_any\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mis_tensor\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mx\u001b[0m \u001b[0;32min\u001b[0m \u001b[0minputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/anaconda3/envs/model/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py\u001b[0m in \u001b[0;36m_call\u001b[0;34m(self, inputs)\u001b[0m\n\u001b[1;32m 2673\u001b[0m \u001b[0mfetched\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_callable_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0marray_vals\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun_metadata\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2674\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2675\u001b[0;31m \u001b[0mfetched\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_callable_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0marray_vals\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2676\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mfetched\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moutputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2677\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/anaconda3/envs/model/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1380\u001b[0m ret = tf_session.TF_SessionRunCallable(\n\u001b[1;32m 1381\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_session\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_handle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstatus\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1382\u001b[0;31m run_metadata_ptr)\n\u001b[0m\u001b[1;32m 1383\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1384\u001b[0m \u001b[0mproto_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mKeyboardInterrupt\u001b[0m: "
]
}
],
"source": [
"#ENTRENAMIENTO DE MODELO\n",
"#####################################################################\n",
"# Instantiate two `DataGenerator` objects: One for training, one for validation.\n",
"######################################################################\n",
"# Optional: If you have enough memory, consider loading the images into memory for the reasons explained above.\n",
"\n",
"train_dataset = DataGenerator(load_images_into_memory=False, hdf5_dataset_path=None)\n",
"val_dataset = DataGenerator(load_images_into_memory=False, hdf5_dataset_path=None)\n",
"\n",
"# 2: Parse the image and label lists for the training and validation datasets. This can take a while.\n",
"\n",
"\n",
"\n",
"# The XML parser needs to now what object class names to look for and in which order to map them to integers.\n",
"classes = ['background' ] + labels\n",
"\n",
"train_dataset.parse_xml(images_dirs= [config['train']['train_image_folder']],\n",
" image_set_filenames=[config['train']['train_image_set_filename']],\n",
" annotations_dirs=[config['train']['train_annot_folder']],\n",
" classes=classes,\n",
" include_classes='all',\n",
" #classes = ['background', 'panel', 'cell'], \n",
" #include_classes=classes,\n",
" exclude_truncated=False,\n",
" exclude_difficult=False,\n",
" ret=False)\n",
"\n",
"val_dataset.parse_xml(images_dirs= [config['test']['test_image_folder']],\n",
" image_set_filenames=[config['test']['test_image_set_filename']],\n",
" annotations_dirs=[config['test']['test_annot_folder']],\n",
" classes=classes,\n",
" include_classes='all',\n",
" #classes = ['background', 'panel', 'cell'], \n",
" #include_classes=classes,\n",
" exclude_truncated=False,\n",
" exclude_difficult=False,\n",
" ret=False)\n",
"\n",
"#########################\n",
"# 3: Set the batch size.\n",
"#########################\n",
"batch_size = config['train']['batch_size'] # Change the batch size if you like, or if you run into GPU memory issues.\n",
"\n",
"##########################\n",
"# 4: Set the image transformations for pre-processing and data augmentation options.\n",
"##########################\n",
"# For the training generator:\n",
"\n",
"\n",
"# For the validation generator:\n",
"convert_to_3_channels = ConvertTo3Channels()\n",
"resize = Resize(height=img_height, width=img_width)\n",
"\n",
"######################################3\n",
"# 5: Instantiate an encoder that can encode ground truth labels into the format needed by the SSD loss function.\n",
"#########################################\n",
"# The encoder constructor needs the spatial dimensions of the model's predictor layers to create the anchor boxes.\n",
"if config['model']['backend'] == 'ssd300':\n",
" predictor_sizes = [model.get_layer('conv4_3_norm_mbox_conf').output_shape[1:3],\n",
" model.get_layer('fc7_mbox_conf').output_shape[1:3],\n",
" model.get_layer('conv6_2_mbox_conf').output_shape[1:3],\n",
" model.get_layer('conv7_2_mbox_conf').output_shape[1:3],\n",
" model.get_layer('conv8_2_mbox_conf').output_shape[1:3],\n",
" model.get_layer('conv9_2_mbox_conf').output_shape[1:3]]\n",
" ssd_input_encoder = SSDInputEncoder(img_height=img_height,\n",
" img_width=img_width,\n",
" n_classes=n_classes,\n",
" predictor_sizes=predictor_sizes,\n",
" scales=scales,\n",
" aspect_ratios_per_layer=aspect_ratios,\n",
" two_boxes_for_ar1=two_boxes_for_ar1,\n",
" steps=steps,\n",
" offsets=offsets,\n",
" clip_boxes=clip_boxes,\n",
" variances=variances,\n",
" matching_type='multi',\n",
" pos_iou_threshold=0.5,\n",
" neg_iou_limit=0.5,\n",
" normalize_coords=normalize_coords)\n",
"\n",
"elif config['model']['backend'] == 'ssd7':\n",
" predictor_sizes = [model.get_layer('classes4').output_shape[1:3],\n",
" model.get_layer('classes5').output_shape[1:3],\n",
" model.get_layer('classes6').output_shape[1:3],\n",
" model.get_layer('classes7').output_shape[1:3]]\n",
" ssd_input_encoder = SSDInputEncoder(img_height=img_height,\n",
" img_width=img_width,\n",
" n_classes=n_classes,\n",
" predictor_sizes=predictor_sizes,\n",
" scales=scales,\n",
" aspect_ratios_global=aspect_ratios,\n",
" two_boxes_for_ar1=two_boxes_for_ar1,\n",
" steps=steps,\n",
" offsets=offsets,\n",
" clip_boxes=clip_boxes,\n",
" variances=variances,\n",
" matching_type='multi',\n",
" pos_iou_threshold=0.5,\n",
" neg_iou_limit=0.3,\n",
" normalize_coords=normalize_coords)\n",
"\n",
"\n",
"\n",
" \n",
"data_augmentation_chain = DataAugmentationVariableInputSize(resize_height = img_height,\n",
" resize_width = img_width,\n",
" random_brightness=(-48, 48, 0.5),\n",
" random_contrast=(0.5, 1.8, 0.5),\n",
" random_saturation=(0.5, 1.8, 0.5),\n",
" random_hue=(18, 0.5),\n",
" random_flip=0.5,\n",
" n_trials_max=3,\n",
" clip_boxes=True,\n",
" overlap_criterion='area',\n",
" bounds_box_filter=(0.3, 1.0),\n",
" bounds_validator=(0.5, 1.0),\n",
" n_boxes_min=1,\n",
" background=(0,0,0))\n",
"#######################\n",
"# 6: Create the generator handles that will be passed to Keras' `fit_generator()` function.\n",
"#######################\n",
"\n",
"train_generator = train_dataset.generate(batch_size=batch_size,\n",
" shuffle=True,\n",
" transformations= [data_augmentation_chain],\n",
" label_encoder=ssd_input_encoder,\n",
" returns={'processed_images',\n",
" 'encoded_labels'},\n",
" keep_images_without_gt=False)\n",
"\n",
"val_generator = val_dataset.generate(batch_size=batch_size,\n",
" shuffle=False,\n",
" transformations=[convert_to_3_channels,\n",
" resize],\n",
" label_encoder=ssd_input_encoder,\n",
" returns={'processed_images',\n",
" 'encoded_labels'},\n",
" keep_images_without_gt=False)\n",
"\n",
"# Summary instance training\n",
"category_train_list = []\n",
"for image_label in train_dataset.labels:\n",
" category_train_list += [i[0] for i in train_dataset.labels[0]]\n",
"summary_category_training = {train_dataset.classes[i]: category_train_list.count(i) for i in list(set(category_train_list))}\n",
"for i in summary_category_training.keys():\n",
" print(i, ': {:.0f}'.format(summary_category_training[i]))\n",
"\n",
"\n",
"\n",
"# Get the number of samples in the training and validations datasets.\n",
"train_dataset_size = train_dataset.get_dataset_size()\n",
"val_dataset_size = val_dataset.get_dataset_size()\n",
"\n",
"print(\"Number of images in the training dataset:\\t{:>6}\".format(train_dataset_size))\n",
"print(\"Number of images in the validation dataset:\\t{:>6}\".format(val_dataset_size))\n",
"\n",
"\n",
"\n",
"##########################\n",
"# Define model callbacks.\n",
"#########################\n",
"\n",
"# TODO: Set the filepath under which you want to save the model.\n",
"model_checkpoint = ModelCheckpoint(filepath= config['train']['saved_weights_name'],\n",
" monitor='val_loss',\n",
" verbose=1,\n",
" save_best_only=True,\n",
" save_weights_only=False,\n",
" mode='auto',\n",
" period=1)\n",
"#model_checkpoint.best =\n",
"\n",
"csv_logger = CSVLogger(filename='log.csv',\n",
" separator=',',\n",
" append=True)\n",
"\n",
"learning_rate_scheduler = LearningRateScheduler(schedule=lr_schedule,\n",
" verbose=1)\n",
"\n",
"terminate_on_nan = TerminateOnNaN()\n",
"\n",
"callbacks = [model_checkpoint,\n",
" csv_logger,\n",
" learning_rate_scheduler,\n",
" terminate_on_nan]\n",
"\n",
"\n",
"\n",
"batch_images, batch_labels = next(train_generator)\n",
"\n",
"\n",
"initial_epoch = 0\n",
"final_epoch = 100 #config['train']['nb_epochs']\n",
"steps_per_epoch = 50\n",
"\n",
"history = model.fit_generator(generator=train_generator,\n",
" steps_per_epoch=steps_per_epoch,\n",
" epochs=final_epoch,\n",
" callbacks=callbacks,\n",
" validation_data=val_generator,\n",
" validation_steps=ceil(val_dataset_size/batch_size),\n",
" initial_epoch=initial_epoch,\n",
" verbose = 1 if config['train']['debug'] else 2)\n",
"\n",
"history_path = config['train']['saved_weights_name'].split('.')[0] + '_history'\n",
"\n",
"np.save(history_path, history.history)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"dict_keys(['val_loss', 'loss', 'lr'])\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEWCAYAAABrDZDcAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvXd8XNWd9/8+kka9Ws2Suw1uGGODqQkBQi9LCAQIISHtF7Kb3SzP7iakbbJPNptNnk3d9NBCEhISAiEkAUI31WAb44Z7kS25SLKsLo005fz+OPfM3Llzp0kzI0tz3q+XX6Np994ZS+dzvl1IKTEYDAZD7pI30RdgMBgMhonFCIHBYDDkOEYIDAaDIccxQmAwGAw5jhECg8FgyHGMEBgMBkOOY4TAYIiDEOJ+IcR/JfnaFiHEJeM9jsGQbYwQGAwGQ45jhMBgMBhyHCMEhkmP5ZL5rBBisxBiUAhxrxCiUQjxpBCiXwjxrBCixvb6a4UQbwsheoQQq4UQS2zPrRRCbLDe93ug2HGua4QQG633viaEWD7Ga/6EEGKPEOK4EOLPQohm63EhhPieEKJDCNFrfaZl1nNXCSG2Wdd2SAjxmTF9YQaDAyMEhqnCDcClwELg74AngS8Cdajf838GEEIsBB4E/g9QDzwB/EUIUSiEKAT+BPwamAb8wTou1ntPB+4DPgnUAj8H/iyEKErlQoUQ7wa+AdwENAEHgN9ZT18GvMv6HNXAzUCX9dy9wCellBXAMuD5VM5rMMTCCIFhqvBDKWW7lPIQ8DLwhpTyLSnlCPAosNJ63c3A41LKZ6SUPuDbQAlwHnAO4AG+L6X0SSkfBtbZzvEJ4OdSyjeklAEp5S+BEet9qXArcJ+UcoN1fV8AzhVCzAV8QAWwGBBSyu1SyiPW+3zAUiFEpZSyW0q5IcXzGgyuGCEwTBXabT8Pu9wvt35uRu3AAZBSBoFWYIb13CEZ2YnxgO3nOcC/WW6hHiFEDzDLel8qOK9hALXrnyGlfB74EfBjoF0IcZcQotJ66Q3AVcABIcSLQohzUzyvweCKEQJDrnEYtaADyiePWswPAUeAGdZjmtm2n1uBr0spq23/SqWUD47zGspQrqZDAFLKH0gpzwBOQbmIPms9vk5K+R6gAeXCeijF8xoMrhghMOQaDwFXCyEuFkJ4gH9DuXdeA9YAfuCfhRAFQojrgbNs770b+HshxNlWULdMCHG1EKIixWv4LfBRIcQKK77w3yhXVosQ4kzr+B5gEPACASuGcasQospyafUBgXF8DwZDCCMEhpxCSrkT+CDwQ+AYKrD8d1LKUSnlKHA98BGgGxVP+KPtvetRcYIfWc/vsV6b6jU8B3wZeARlhSwA3m89XYkSnG6U+6gLFccA+BDQIoToA/7e+hwGw7gRZjCNwWAw5DbGIjAYDIYcJ2NCIIS4zyqK2ery3GeEEFIIUZep8xsMBoMhOTJpEdwPXOF8UAgxC1X4czCD5zYYDAZDkmRMCKSULwHHXZ76HnAnYIITBoPBcAJQkM2TCSGuRRXsbIpM1XZ97e3A7QBlZWVnLF68OAtXaDAYDFOHN99885iUsj7R67ImBEKIUuBLqF4qCZFS3gXcBbBq1Sq5fv36DF6dwWAwTD2EEAcSvyq7WUMLgHnAJiFECzAT2CCEmJ7FazAYDAaDg6xZBFLKLajSeEC1DgZWSSmPZesaDAaDwRBNJtNHH0SV7C8SQrQJIT6eqXMZDAaDYexkzCKQUt6S4Pm54zm+z+ejra0Nr9c7nsOc8BQXFzNz5kw8Hs9EX4rBYJiiZDVrKJ20tbVRUVHB3LlzSZSBNFmRUtLV1UVbWxvz5s2b6MsxGAxTlEnbYsLr9VJbWztlRQBACEFtbe2Ut3oMBsPEMmmFAJjSIqDJhc9oMBgmlkktBAaDYYrR8gp07pzoq8g5jBCMkZ6eHn7yk5+k/L6rrrqKnp6eDFyRwT
AF+Msd8PJ3Jvoqcg4jBGMklhAEAvGHRj3xxBNUV1dn6rIMhsmNzwu+4Ym+ipxj0mYNTTSf//zn2bt3LytWrMDj8VBeXk5TUxMbN25k27ZtXHfddbS2tuL1ernjjju4/fbbAZg7dy7r169nYGCAK6+8kne+85289tprzJgxg8cee4ySkpIJ/mQGwwQS9Kt/hqwyJYTgq395m22H+9J6zKXNlfzH350S8/lvfvObbN26lY0bN7J69Wquvvpqtm7dGkrzvO+++5g2bRrDw8OceeaZ3HDDDdTW1kYcY/fu3Tz44IPcfffd3HTTTTzyyCN88INm+qAhhwn6IDA60VeRc0wJIYhFUEqkhPy8zGfenHXWWRG5/j/4wQ949NFHAWhtbWX37t1RQjBv3jxWrFgBwBlnnEFLS0vGr9NgOKEJ+iHgm+iryDmmhBDE2rkf6h6md3iUpc1VGb+GsrKy0M+rV6/m2WefZc2aNZSWlnLhhRe61gIUFRWFfs7Pz2d42PhGDTlOwAjBRDClg8V5eRDM0PibiooK+vv7XZ/r7e2lpqaG0tJSduzYweuvv56ZizAYphrGNTQhTAmLIBZ5QljuIZn2wqza2lre8Y53sGzZMkpKSmhsbAw9d8UVV/Czn/2M5cuXs2jRIs4555y0nttgmLIE/UoMDFllygsBqFhBfgYqdH/729+6Pl5UVMSTTz7p+pyOA9TV1bF169bQ45/5zGfSfn0Gw6QiGAQZNK6hCWBqu4astT9T7iGDwZBGdNqoEYKsM6WFQGcLBY0SGAwnPtolZGIEWWdKC4GwuYYMBsMJjrYITEFZ1pnSQpBvXEMGw+QhoF1DxiLINlNaCHSwOGAsAoPhxCfkGjIxgmwztYXAihFIYxIYDCc+Jlg8YUxtIbBcQ4EM6MBY21ADfP/732doaCjNV2QwTHK0AJg6gqwzxYUgc8FiIwQGQ5oJWi3cA6Ng3LlZJWcKytKNvQ31pZdeSkNDAw899BAjIyO8973v5atf/SqDg4PcdNNNtLW1EQgE+PKXv0x7ezuHDx/moosuoq6ujhdeeCHt1zYuNv8BfENwxocn+koMuYbdEgj6Id8zcdeSY0wNIXjy83B0S9TDAsn8kQCFBXmQn6LxM/1UuPKbMZ+2t6F++umnefjhh1m7di1SSq699lpeeuklOjs7aW5u5vHHHwdUD6Kqqiq++93v8sILL1BXV5faNWWDDb8Eb68RAkP2sccGAj4jBFlkSruGBAIhQGbYzHz66ad5+umnWblyJaeffjo7duxg9+7dnHrqqTz77LN87nOf4+WXX6aqKvNdUMeN3wsj7s30DIaMYq8fMCmkWWVqWARxdu4Hj/RRUVzAzJrSjJ1eSskXvvAFPvnJT0Y99+abb/LEE0/whS98gcsuu4yvfOUrGbuOtODzwkh6h/wYDElhFwJTVJZVprRFAFYH0mD6j2tvQ3355Zdz3333MTAwAMChQ4fo6Ojg8OHDlJaW8sEPfpDPfOYzbNiwIeq9JxzGIjBMFMYimDCmhkUQhzyRmWCxvQ31lVdeyQc+8AHOPfdcAMrLy3nggQfYs2cPn/3sZ8nLy8Pj8fDTn/4UgNtvv50rr7ySpqamEy9Y7PeqP0L/CBQUJX69wZAunDECQ9bIASEQGes15GxDfccdd0TcX7BgAZdffnnU+z796U/z6U9/OiPXNG781iS1kX4jBIbsEjRCMFFMfddQnjC9hlLBp4XAxAkMWUbXEYApKssyU18IhGlDnRJ+a26yiRMYsk2Ea8jECLLJpBaCZNJCM+kaygaZTn2NIOAPB+yMEBiyTUSw2FgE2SRjQiCEuE8I0SGE2Gp77FtCiB1CiM1CiEeFENVjPX5xcTFdXV0JF0rlGpqcQiClpKuri+Li4uycUMcHALzGNWTIMkYIJoxMBovvB34E/Mr22DPAF6SUfiHE/wO+AHxuLAefOXMmbW1tdHZ2xn1d77CPgRE/ordkLKeZcIqLi5k5c2Z2TuYfCf9sLA
JDtjHpoxNGxoRASvmSEGKu47GnbXdfB9431uN7PB7mzZuX8HU/eG43331mF3u+fiUFqbaZyDV0fABMsNiQfexWgAkWZ5WJXBk/BjwZ60khxO1CiPVCiPWJdv3xKC3MB2DIF0jwSkMoYwiMRWDIPiZ9dMKYECEQQnwJ8AO/ifUaKeVdUspVUspV9fX1Yz5XaaEyeoZGjBAkxG+EwDCB2NNHjRBklawXlAkhPgxcA1wss5ASU1ZkWQSjpndJQowQGCYSkz46YWRVCIQQV6CCwxdIKbMymSVkEYwaiyAhPhMjMEwgpunchJHJ9NEHgTXAIiFEmxDi46gsogrgGSHERiHEzzJ1fo2OEQyOmF+shJisIcNEEjQWwUSRyayhW1wevjdT54uFCRangM4aKqwwQmDIPgFTRzBRTPl8yrIiEyxOGm0RlNUZ15Ah+5iCsgljygtBicdyDZlgcWJ0jKC8wVgEhuxjXEMTxpQXAm0RDJtgcWJ01lBZvRECQ/YJ+kFYS5IpKMsqU14IQsFiYxEkRguBsQgME0HADx5rpKxxDWWVKS8ERQV55AljESSFriwua1Ci4DfmuSGLBP2QX6isAiMEWWXKC4EQgrLCAgZNsDgxfq/6IyypUfeNVWDIJkEf5HuUGJgYQVaZ8kIAUFKYbyqLk8HvhYISKK5U903mkCGbBPyQV6CEwBSUZZWcEIKyogJTWZwMvmE1p7ioQt03FoEhmwQtIcgrMBZBlskJISgdq0XQug4m6VCbMeEfAU+JEQLDxBD0hS0CEyPIKjkjBCnHCI5uhXsvgZaXM3NRJyL+YSgohiLtGjJCYMgiQb8VI/AYIcgyOSIEBXhHR2E0hT53g9YMhKHjmbmoExGf1yEEJkZgyCIBP+RZQmDqCLJKTghBWVE+Fw/8BX6wMnlXz+igurW3Zp7q+L3gKba5howQGLJI0Ad5+UoMTIwgq+SEEJR4ClgwugsGjoIvSatAC4G9NfNUR2cNmRiBYSIIuYZMjCDb5IQQlBXlMz14RN0ZGUjuTb4ctQgKilTAWOQbITBkl4AOFhcYIcgyOSEEpYUFNMsOdSfZxS1Zi6DtTRjuHvvFnUj4vJYICGUVZEoIvH25lY1lSI5gwJY1ZFxD2SQnhKAi30ejsBbrZP3eycQIggG4/yp44+fju8ATBb9VRwAqYJwJIRg6Dt8+GfY8l/5jGyY3urI4z2MKyrJMTghBY7A9fCdpi8ByIcWzCLy9Sih0htFkxz+iYgSgqou9GQgWD3aq76znQPqPbZjchFxDJlicbbI+vH4iqPMfDd8ZTTJGkIxF4O2xbnvHdmEnGr5hlTUElmsoA0KghTWXgvCG5AgGlDWAMDGCLJMTFsG00cPhO+mMEQxrIZgiaZb+EVVHAJmLEWhhTTZ7y5A76PRRU1CWdXJCCKq84xCCXLEIpAxXFoMRAkP2sVcWm4KyrJITQlA+3MaBYIO6k1aLIMUA9IlMwAcymHkh0DMPjGvI4ETHCExBWdbJCSEoGWxjj5xBUBSk1yIYnkIWgf6cnkxbBDpGYCwCgwMdIzAFZVln6guBlBT1t3JQNuD3lI3BIkjGNTQFLAItBCGLoFIt2un+g9TfZyp9nwy5QdCnislMQVnWmfpCMHScfN8ArbKB0fzy1NNH/Um4hkb71W5mMqNdNXYhgPRbBX7jGjLEIOAzBWUTxNQXgp4WAA7KBkbzy1JPH41nEWjXEEz+OIF/RN16rDqCTDWeM8FiQyy0a8gUlGWdqS8E3S0AtMp6RvJKkl/Y9EIVzyLw2oRgsscJ/E6LIEON50wdgSEWEemjxiLIJjkgBKqC9RANDOclGSMIBsJCkKxFMNnjBNoiyLQQhCyCwfQe1zD5cQ6mMf2oskYOCEELlNUjCssZEiXJLWx2t0XcGEEPFJarnye7RaB36B4TIzBMEAFfOGsIOfnjbpOI3BCC6jmUFuUzSGlybah1fKCwPHHWUPUc9fOkjxE4s4Yy5RoyQmBwIRgAZLjXEJ
iisiwy9YWg5wDUzKW0sIBBkrQItBCU1iqLIJaJOtwNNZYQTHaLIJYQpPtzmToCgxs6OJxvFZSBiRNkkaktBAE/9LRaQpBPvyxWvulEJqfOLCqrU7fafx5xbJ96nbYIJnuMQO/UPbbuo5A5i8DUERjs6LoBnT4K6u/XkBUyJgRCiPuEEB1CiK22x6YJIZ4RQuy2bmsydX4A+g6BDEDNHMoKC+gNWotcosVNL1KlWghc3Bh6p1w9O/L+ZCVkEVjzCDylIPIyFyMIjBgfsCGMtgjyPMoqAGMRZJFMWgT3A1c4Hvs88JyU8mTgOet+5rBSR6mZS0lhPn1Ba5FLVEugXUPaInCLE+hisrI68JRNoRiBJZaZmlJmb9lh4gQGTUgI7BaBEYJskTEhkFK+BBx3PPwe4JfWz78ErsvU+YHw8JPqOZQV5dMdsPzfCS0CSyhKa9Wtm0WgU0eLq6G4KrKmYDISqiwuCj+WiSll9sXfCIFB4xYjMEVlWSPbMYJGKeURAOu2IaNn625RO4zKGZQWFtDttxa5hEJgCxaDu0WgF/6SmsxN88omzspiyMxwmgiLwNQSGCxCMQJPOGvIWARZ44QNFgshbhdCrBdCrO/sHOMoyOIqmHcB5BdQWphPV0gIEixuTteQq0VguYZKtEUw2WMEw1Z5f374sUy4hnzGNWRwIWgPFmshMOmj2SLbQtAuhGgCsG47Yr1QSnmXlHKVlHJVfX392M72jjvgQ38EoLSwgK5Ry/eYqJYg5BqKFyOwuYaKKid/jMDnDaeOajJlEegiPJNCatDoxIF8jy1GYIQgW2RbCP4MfNj6+cPAY9k6cX1FEd2BJF1DviGVMVNiJTW5Zg1p19BUsQi84apiTaaCxSXT1M/GIjBoQq6hfFNQNgFkMn30QWANsEgI0SaE+DjwTeBSIcRu4FLrflaYW1tKP8mmjw6qXateGGNZBIXl6pd2SsQIvOGMIU1hWdhNli58w1BqCaypJTBo7OmjpqAs6xRk6sBSyltiPHVxps4Zj7l1ZaqyGJLLGvKUhhdGtyllw93KLQRhi0BKlXY5GfF7IzOGQAlduhdrvzdsaRnXkEGjd//GNTQhnLDB4nQzs6YEKfLx5RWpQTLxGB1Uu+GQRRDDNVRiCUFRpfpFjjfW8kTH5+IaKrTmN6SzC6Tfa8vGMq4hg4WuIs7LtxWUGSHIFjkjBEUF+TRXlzAsSpN0DZUlsAh6wjvb4ip1O5njBP7h6GCxp1RVZru12BgLAb9yAYRiBMYiMFhEVBZbFoGJEWSNnBECgLm1ZSpOkHKMIEb6qBaAkBBM4jiBfyRaCNKd3aOD7qUmWGxwYE8fNTGCrJNbQlBXSk+gOD0Wgd01NBUsAt9wZDEZqO8Akh/vmfAc1vdoYgQGJ6HKYo+pI5gAcksIasvoCxTjH06ioKywzCp3L4hhEdhcQ6EhLpNYCPwjLsHiUnWbrswhLaiF5WrXZ4TAoAnYew0ZIcg2OSUEc2rLGKAE31CSQgDKKnBaBD6vcnMUTyGLwD/skj5quYbSlTnkt7W6Liw1riFDGNN0bkLJKSHQtQTBRL780YGwEHiKoxcsezEZhHv3T+YYgc8tfTTdriHd2K5YBaKNRWDQ2NNHTdO5rJNTQjBrWimDlJCXbPoouFsEw7aGczBFLAJvdIzAY7mG0hYstk1B85TEtzR2PwvH9qTnvIYTH1fXkLEIskVOCUGxJx8Ky/EE4ixA/lG1O4lnEeiGc9o15CkFkZ+ZfkNHt8Bvboo/Ozkd+F16DYVcQ2mOEXi0RRDHNfTo7bDmR+k5r+HEx9U1ZGIE2SKnhACgsKyKAumLnRvvsw2uB7U4Oi0Cp2tIiMz1GzrwGux+Crr3p//YGiljCEGGsoYKSuK7hoJBGDqe/vYWhhMX0310Qsk5ISgpt9w5sVJI9eITsghKXCwCh2sIMtdvSFsZA+3pP7YmNIvAKQQ6ayjNdQSeYvfvVTPSB0j3Zn+GqYk9fTQvXzV9NAVlWS
PnhKC8Si3e/b3O4WkWWgi0f9zNInC6hiBzFoEWrP5MCoEO4jpjBNoiSNPO3GePEZTGHkyjLa5cyCo6vh/evH+ir2LiCdgqi/WtiRFkjZwTgupqVdV6tPOY+wu0G0S7htx2rnqh0kFiyNxMAj07IRsWgTNrqKBQ+WvT5RpyBotjLfTa4sp0XOREYONv4S93mE6sQVsbalC/dwGTNZQtck4I6mvVsJn2YzGmnjldQ64WQY8SAfs0r0xbBJkUAr0gO7OGIL1pnsnWEYQsghxYHLXIDsewUHMFu2sIVDGnsQiyRu4JQZ0Sgu6uLvcXuMYIXFxDdrcQWEKQAYtALxQDMYe5jZ/QTr0o+rnC8mjX0Nt/gnsuTb0rabJ1BNoimMzdXJNFC/1QjgtBwBYsBssiMEKQLXJOCIrLlDunV8cIju+HP/1jeLEfdcsacnENlbgJQSYtgqPpP7bGb8vmcVJYGu0aalsHbWtT/0NNto5Ax2BywiKwft+GYmxMcgU9qtIeIzAFZVkj54SAogoABvusXeemB2HjA3Bkk7qflEXQE5kxBCpGMNof/oVOF6GsoQxaBD5bfr+TwrLoBVsLXqoLtW9Y7fTy8pRFEBhx/768ORQjMK4hRdCnMoXyrCUp3wSLs0lSQiCEuEMIUSkU9wohNgghLsv0xWUESwi8g9Zic+A1ddu1W926xghcLAI31xCkP2CclWCxbafuxM01pD9jqlk9/pGw1RGqWo7R0A9yI300ZBHkuBAEfGG3EFiuIZM+mi2StQg+JqXsAy4D6oGPksV5w2mlsAyJQIz2MzA0BG3r1ePHdqlbN4sg6I/MYBjudnENZajfkHYNDXenb0CMk3hC4HFxDYUsglSFYDhsdejAdKzpb2M5/mTExAgUQX/YLQSWRWCEIFskKwR6EO9VwC+klJtsj00uhMBfUEYFwxzfvTa86zymLYIBtRvR2Qt6cdSvk9LKGophEaQ7TjA6AEXWsTPlHrIHcZ0UlkW7gLxjtAjsje1CFoFLLYG2CAKj6Xe1nWiYGIEi6HdYBB5TUJZFkhWCN4UQT6OE4CkhRAUQzNxlZRZZWEEZXnz7XlEPzD7XJgS2hnNg27lau2bfkPoFdVoEoZkEabQIAn51vtoF6n6mhCBWZTFYMQLHYj0eiyDkGkrCIhjLOSYbJkagCPrDs4rBFJRlmWSF4OPA54EzpZRDgAflHpqUiOIKysUwRYffgNqTYM55qpdPwKcWXp0xBNEWwaBViFZaF3nQTFgEepGoPUndZipOEKuyGNIbLPaPhMVGi63bMYZtQjDVU0hNjEAR8DlcQ6agLJskKwTnAjullD1CiA8C/w5M2p7LBSVVVIkh6o6/payBuoVqR3J8f+QsAoi2CLQQlNVHHjQTMQLtP860EPji1RGUqe9E1wxIOfZgsS9Ji0Cnj47lHCcqq78Jv7kx8rFgMCz2Oe8aCjhcQ6agLJskKwQ/BYaEEKcBdwIHgF9l7KoyjCgq57S8fRQH+mHOO6D2ZPVE1+5o11CURWBVJEcJgeUqSqdFoIVg2nx1mzGLwFbx66SwDGQg7D7ye8N/oKlm9fhdYgRutQTeHiitVT9PFSE4shkOvxX5mN0aynnXkC/SNWQKyrJKskLgl1JK4D3A/0op/xeoyNxlZZiiCsqx/gjnnAt11o772C4lBHqRgrArQ++ah7RF4HANZSJGoHeLJTVqYcy0ELhmDTlcOHahG4tF4HFaBA4hCAaVVVXRbF3bFBGC0f5oa1H//xYUG9eQM33UFJRllWSFoF8I8QXgQ8DjQoh8VJxgcmIt2u2iFqrnKP9+eaOaiDU64IgRWAtWlEXgEIL8ArVoptUisBaOogoon57BYLEX8ovUXAUnzpkE9sVsLDECLTax6ghGegEJFdPdn5+sjAyoAjp7CrCOD1TPVt+vP4d3wK7pozn8fWSZZIXgZmAEVU9wFJgBfCtjV5VprKKyN/yLCOh2OXULwxZBRIzAYREMHlOLmP01muKqyI
yX8aKLyYrKobwhszECt4whsAmBtWiNxyLw2y2CGGMwdaC4smls5zhRcRNS7fqrnq1uc9k95JY+auoIskZSQmAt/r8BqoQQ1wBeKeWkjRFQpHb8a4OL6Oy3dmh1J7sLgZtF4LQGNOluPKcXiqIKZbFkaiaBf9jdLQQ2IbAW7BG7EKSY0RNRRxAjWKyFtGKKCYEWdbvr0G4RQG4HjJ3po6ayOKsk22LiJmAtcCNwE/CGEOJ9mbywjGK5ht4ILuFwr7XQ1J6sFqHBzkjXUJRF0BkdKNaku/Gc3kUWVYQtglQ7fiaD3WXjJMo1ZBeCVF1DXpcWEzEsAi0EUyZG4PL96cdCQpDDFkFU+qgpKMsmBYlfAsCXUDUEHQBCiHrgWeDhTF1YRll2A+1Dkt3PzeBwzzCnz65RriEAGUxsEVTOcD9ucVV63TfaIiisUD7zwIhaSJzFbOPFHsR1EsruSYdryOaCyreGlEcJgZU6WmkFi6dC4zkpw4t+hEXgFIIctwicwWITI8gaycYI8rQIWHSl8N4Tj6oZlJ7/KUBwuMdazHTmEIRn9YJ7jCCuayjNweKCErVoljeqxzIRMPaPuNcQQNg6CmUN6QB2ZWoWQTCg/rDtRWvxpr+FgsVToBW1b0htMMARI9BCMEfd5nqMIMo1ZLKGskWyFsHfhBBPAQ9a928GnsjMJWWHimIPFUUFHO6xFviqWeFpZLGyhqS0hCCWa6gyzUIwEIpnUN6gbgeOQv3C9J0DIls/OHFzDeUVqJTWVCwCt+E3bsNpQq6h5sj3TWZGbE373GIEVbPUba67huxWqSkoyyrJBos/C9wFLAdOA+6SUn5urCcVQvyLEOJtIcRWIcSDQogYDurM0lxdwiFtEeTlwzSrp0+Ea6gIEMoi8PYqv2WiGEG6/Pgj/aEMJ8qtHXImLAJ7ENdJoYtrqLhKfUep+O99LkVrbsNpvD1qN6jnPUyFYLG9e6vXxTVUWqtSj3NZCKLSRwtNjCCLJO3ekVI+IqX8Vynlv0gpHx3rCYUQM4B/BlZJKZcB+cD7x3q88dBcXRx2DYHKHIJIIRAiPJMgVp8hTXGVqsJ1NmkbK6MDNiHQFkEGMod8w5FFdHY8zqyhPuUWijd83g23ojVPWfTA4x6QAAAgAElEQVQxdGfX/AK1MEwFIdCxHoiOERQUq89aWmtcQ/k2IcjzKHea7j67/j545XsTc205QFwhEEL0CyH6XP71CyHGkydZAJQIIQqAUuDwOI41ZpqrSxxCYLlcnDUCnmK1o41VTKZJd+O5kX4VKNbHzi/KjBCM9IUFx0lBodqd2V1DxVXKlTQWIXBaBFEtrm3T3zwlU8M1FMsiGLH1tSqtMcHivPzwfS0K2j206few8cHo9xnSQlwhkFJWSCkrXf5VSCkrx3JCKeUh4NvAQeAI0CulfNr5OiHE7UKI9UKI9Z2dnWM5VUKaq0voHvIxNGoFpUIWQXnkCwtKLIsgRp8hTdqFwLZAC5G5WoKR/nDTPDfsvnxvn/qcbot4PEIzD+wxAhcxGbbNg071HCcqETECe/roYPh3rWRabruG3NJH9eOgYmPpLNY0RJD1zB8hRA2qZ9E8oBkoszqaRiClvEtKuUpKuaq+PsbCO05mVKvdaShgvOgquODz0Hx65AujLII0CUHLq7D+F7GftweLASoa028R6G6isSwCiBxX6e1VouE2yzkeIdeQ3SIojR5MM9wdbuBXUDw10ke1RSDyo2MEWghKa3PcInAZVQlKCKRUsbHhnszU0RgmJAX0EmC/lLJTSukD/gicNwHXQXNICKxdaVE5XPQF5Q6xU2C5KEIxglr3A6YqBK//BJ77z9jP24PFoCyCdAeL/V5llhfFsQh0K2oIu4bcMn4SnQciW1nESh+dchaBFSOoaIqOEWihL52W4zGCgGMwjfVz0Ke+P9+QqqOZCjGjE5CJEIKDwDlCiFIhhAAuBrZPwHXQXK0WpYg4gRueYvULOHRM7VadQqHRO9lkO5
Ae3692wLHGMdqDxWBVFx9N7tjJ4rU1totFYakjWFwV/k6SxediERSWuriGesPf41SLEVQ2x4kR1CqRzdXcebfh9aBiBPbNj3EPZYSsC4GU8g1URfIGYIt1DXdl+zoAGiuLyRNJCEHIIojTZwhSswikhO4WQLr7hgM+q6bBLgTTlfsgnT1YtGjpa3dDu4YCfrWohSyCVILFbjECh1URDCgfurYIUg1In6joGEGl0yJwxAggcihPLuGWPgrqd92++Rk2QpAJJqQ6WEr5H1LKxVLKZVLKD0kpRxK/K/148vNorCzmcG+CXafe/cYrJoOweyWZXctAR9g/rmcc2LE3nNPoFNLBNAbPR2yVwrHQriG7aHhK0l9HoAXUnjU0FYRgdECJXnF1nBiBFoIcdQ8500e1myjgi4yL5apQZpjJ2yYiTUSlkLqhK44TWQQFheoPPhmLoHt/+Ge3hT0kBLZgsa5APbY78fGTJRnXkN6568+lg8VBf/LWScgisMcISpXfV7vGtIBONdfQSL9a8IurIi2Ckf7IGAHkbsA44HOkj1oWQdAXmSlnXEMZIeeFoKmqOIkYgbUzjdd5VJNsv6HjdiFI0iKYfbbKPNm3OvHxk0WfJ176aGGZcmOEhKAq7OtPNpirB7J4HFlDEN71a7O/xJ41NAWCxTooXFyphE0PoLG3PNeuoVxNIXW6hvJsdQQRFoERgkyQ80Iwo7qEw71egsE4aWkFxeqPduh4+oSguyX8s9su0N6CWlNUATPPTLMQJBMstmIEdjdSvOHzbsSqI7A/p81+u0UwFdJHRywXUJEVhxnpU8IY9EWmj0JuuoakdEkf1ULgV0KgY2XGNZQRcl4ImqtLGPUH+czDm7j3lf2sazmOdOYqe0pgsAOQiYWgKMnGc937wz3341kEhY4FesFFagh6unaOIcsjnkVQqoRA78Z0sBhSsAhi1BFAOFbidVgEUyV9VGd/aavL2xuuy3DGCHLRNaQ7s+a7FZRZFkHdSSDyjGsoQ+S8EFy0qIHzT67jpV2dfO2v27jxZ2vYcNDxy2b3a8eqIdCk4hqqPUkFRpMNFgPMvxCQ0PJy4nMkQ1Lpo2Wqh9Kglcang8WQ/I7dN6x2fPZc8SiLYIrHCIpsQuCMAXlKVQuRXHQN6ThTrPTR/na1aSquMhZBhsh5IZhdW8qvP3426//9Ul6+8yLyBLy82xG8tfu10+Ya2g81c1UDu7gxAke7ixlnKCth7wuJz5EMI33WIuSJ/RrdeK7viLotrow9fD4W/pHoVtfaPx5qX+GMEVhCEAwmd44TFXuMANR3HrIIrO9ACKu6OAeFIOgiBDpGELRcQ+UNaoNgYgQZIeeFwM6saaUsm1HFa3sd5rndIkiHEIz0q8DztHkqCynZGAGoBXvuO9MXJ0jUXgLCi1W/JQRFlbaBPcm6hoajW127WQQFxbYB93oWxCS3CkIxAm0R9IX/f+2uv1ytLg5aRXRurqHRQWUxl09X1rNxDWUEIwQOzl1Qy1sHu8ON6GBsFkG8nig6UFwzT+0C48YIyqOfW3CRsijsAeexMtIfPz4AYSHoO6wWrrz81C0Cnzd6HGZoDKbNItBuIZg6QuCMEYzYhcDW6bZ0Wm7GCHQ1tZtrSG8+yhuUpWhcQxnBCIGD8xbU4QtI1rfYfuG0RSDywsVOsSiuUjuceDtlnToasghiCIGnLDK3WjP/QnWbDqvAm6JFoCuQQ4t0sq6h4UjLyn4M/V3ZO49C+PWTOWAcDKjrd1oEutrY7vrL1Q6kQTchsCyC3jZ1WzHduIYyiBECB2fOraEgT0S6h/SCVVoHeQm+smTaTOhispp5ysIY6or2gzsbztmpW6hGOaYjTjDSF7+GACItAqcQpBIj8DiFwFlH0O2wCPTzk9giGLUt+EVxYgSQw64hK0bg5hrqbVW35Y3GNZRBjBA4KC0sYOXsatbste3S9c40kVsIbEIQp/Hc8f3ql7qkWomLDEabvPaqUydCKPfQ/hfHH0
hNxTVkF41UC8p8bhaBIwXV67AItHCk0sriREPv/AvLVcaUpyxOjKDWakI4yYPjqeJmEeQ5LILyRss1ZFpRZwIjBC6cu6COLYd66R22dip691uWIHUUkrQIWpQ1AOGWFU73kLPzqJP5F6pFo31L4muKh7cvsRB4bLvWMVsE3mghKK5UC+SL/w/W3aPcInbXW6rnOBFxBv2LK1VjPbcYQck0tSnItV1vvBhBSAisrCEZiBz9aUgLRghcOG9BLUEJb+yz3EMpWQTWjjaRa2iaJQS6LsEZMNa557FoWGIdqyXxNcUj0XQyiFystGikWlDmG3YJFpfAh/4E0+bD4/8GfYciXUMFU0AI7BYBWAWHVoxA5LknIqSzqeBkwC19VNebDHaqzUFBUXiTkGtCmQWMELiwcnY1RQV54ThByCJQf6itx4fwBWKY74ksgoAPeloTWwQjA/F36uWN6nY8g2qCARiNE4vQFLpYBPke1fcoWf+9fyTaIgCYdSZ89En4wB+UlXPSxeHnQimqk1gIRh31IMWV4RhBYYVy82l0d9l0Dx860XFNH7XN/NC/69ptaDKH0o4RAheKCvI5c+401ux1WgR1tPd5ufg7L/Lg2oPubw4JQYxdS2+rMm9DFoElBFEWQYJsntJataMcz+jKkNsiBYtAfz4hUmsT7ZY1pBECFl4Gtz0GJ18aflxbHVMlRgBhi2C0P/J7BZsQZGAu9YmMm2vI3oBOC4G2Fk3mUNoxQhCDcxfUsrO9n87+kfCCVFbPM9vaGQ0EecvZhkJj7yfjxnFbxhCEXUPO/PF4wWJQaaVlDdA/jollybSXALVT0zs0uxsplV5APm901lAiCqaCReAWI7AsAuf/b1kG5k1MBlyDxfmAZS2FLALjGsoURghicP7Jaqf+/I52qF0Al30dlr6HZ7er3dq2wzGyggqK1AIWSwi6bTUEoGYYFFVFWwSJgsVgja4chxshmRbUGr17tU8yS9kiKEn8OjupFq2diDgLA+0xAqdFUFKjFsOccw25pI8KEb5fYVxDmcYIQQxOnVHF/PoyHnnzkPqlPO+fGMir4LU9XRQW5LG3cwCvL8as4XhtJo7vV0JRPj38WFltZIzAP6KabcULFoM1zH4cboRkWlBrdOaQ3Y3kKU2hoMyljiDhOXX66BSpIwBHjMDx/5uXp+JQgxMgBM/8Bzx0W/bPC+5N5yBshRrXUMYxQhADIQTvO2Mma1uO03JMFf+8vKuT0UCQ9585C39QsqdjwP3NiYSgZm5kYVppXaQ7YCRJ3315Y3osgqKq+K+D8VkEUrqnjyYi1VoFN9rehD99auJy80PZQZZ1U1Slvovh4+5CX1Y/MRZBy8vQtj7754XwhLo8R+NDbRHoTVNhmXqNcQ2lHSMEcbjh9JnkCXj4TZXL/My2dqpLPdx27hwAth2J4R6KJwQdb0P94sjHyuph0BYjSHanXt6gdo9jXeT0NSZjEYSEwJHemYwQhGYRpCgEqWYmubHzcdj4m4kLwOq5xDo7SLvh+o64x4DG6+4bK90tKk41EcVaofRRRzsVLQw6iC6E6TeUIYwQxKGxsph3LaznkQ1tjPqDPL+zg3cvamB+XTmlhfmx4wSxhGCkX/3BTV8W+bjTNRSrBbWTiukq0DbWP4zQMPpUYgRjCBZrsXDWESQi1cwkN/S8W928LNvozqMabVGN9EbHCEBZedkOFo8MKBHweyemr5Nb+iiEXUMVNjeq6TeUEYwQJOB9Z8zkSK+XHz6/m54hH5csbSQvT7B4ekXqFkH7NnXb6BCC0rrI3VisFtROQumGtsyhgB8e+yfo3Bn/vZDcdDLNeFxDel5xqhaBPsd40kf1dzNRQjDqyP6yf9fxXEPZ3Jn3HAj/PBHdT0MxAqcQWDED/XsOKqBuLIK0Y4QgAZcsaaSqxMOPX9hDYX4e71qoisqWNley/Uhf9FhLiCMEW9WtUwjK6tSuSPs+Y42pdBIqKrO5PY7tgrd+DW8/muCTobJXRJ
77ztRJyMftCBYnJQR6XvEYhKBgnHOLTziLIIEQlDcoV0k2F7tE87OTIRgjcSKV97oFi/OLIt2RJdUmRpABjBAkoNiTz3tWNBOUqragvEj9si5pqqTf66et22UhLK5SbhenSLRvVc9VzYx8PFRUZv0RxhpT6cStuvj4PnWbrEVQ5KhujUVhmfqjtGf+eIqTEwK9kKeaNQTjn1scsggc9RbHdsNdF4WnrmUKPZ1MYxdS1xiB9X+aTfdQ9zgtgrY34etNkcdJhVD6qEMI8jzq+7D/fhrXUEYwQpAEN62aBcCVy8K+yqVN6g96u5t7qLhKpX860x7b31bWgHPh1c3sdJwgaSFwqUTVdQrHdsV/L1jVy0m4hQDmng9Lrol8LFmLQLu6PElYHk48xWNPHw34w/UZzgW/5RU4vAG2PDS2YyfLyECkZRdhEbh8H7rfUDYDxhGuoTG0wT7wKgRG1O/3WIiZPuoJ1xBoSmqMEGQAIwRJsGxGFc/8y7tCggCwaHoFQsTIHHLrNxQMWkJwSvTrnW0mtFsgUbC4qEItrhEWgRaC3YnN9WRaUGtOuxned1/kY8nu1nUHycrm5M5lp2AcFsFgB2BZZU7XUI/VImTrI2M7drLEjRG4CP1EtJnoblHzLWBsFoG2PvXsgFQJVRY7YgRLr4Vl74t8rKRaBdrH44oyRGGEIElObqwgLy+8ky8tLGBeXZl75pCbEPQcUDtjZ3wAIhvPSQnb/gQNpySX1lne4G4RBEYSdyb19iZ3jlh4SpVZH/DHf50WgupZ8V/neo5xxAi0Oyi/MNo1pBetI5vg2J6xHT8ZnDGCokQWwQS0meg+AE2nqXjRmIRgu7odtxA4LILz/w3O+fvIx5Lp7mtIGSME42BpUyXbjyYpBNpsdhOCkEXQCYc2qMXpzI8ldxHljZGL3PF9UD1b/ZzIPZRMC+p4JDuusrdVFVIVJ1G45naOsaaPaoFsPAX6D0c+13MQak8CBLz9x7EdPxmcMQI9nAbcLb5st5mQUm1Sps1X507VNSRl2CLoGacQOGMEbuh+QyZzKK0YIRgHS5oqaT0+HB5go3HbtbRvBUR4joAdT7HaNQ52wfp71c/Lb07uIuwFSLrF9cmXq/uJAsaJOpwmItmmcD2tY7MGYHzpo1oImlaohcNuWfS0wsyzYM55sOXhzKRr+ketViGO71iLr5tFkO02E4OdyvVWM0c1QEzVIuhtC8eAxmoRxEofdUP3GzKZQ2nFCME4WNqs/qB3OOMERS4dSNu3quZ1haXuByurg649ymd96o3JL9D2fkO6xXXzClWWn0gIkplOFo9kh9P0tkZnSiXLeNJHdepo03J1qzOI/KMqZlA9G5ZdD8d2jj3QGQ9nnyGN/s5jpQdns82EzvSpmTs2IdC/Y9MWhF2AqeI2mCYWpt9QRjBCMA505tCWQw5/pdtMgqNb3QPFmtI62POMypA58+PJX0RFozqPfySyxXX9QrXAxWMkiaE08Uh2lGRvK1SN1SIoHnuweOCoWty0q0xnDvW1AVJZKUveo9pYZCJo7Ow8qolnEcD4e0ilgo4jVWuLIEXXkI4PnHyp2pCMRbRj1RG4YVxDGWFChEAIUS2EeFgIsUMIsV0Ice5EXMd4aago4tQZVfzsxb10DYyEn3DGCEYGVBC38dTYB9MB45lnwfQ4r3NiryWwt7iuXwydu2K7PPwjKqDsiBFsbuthw8Ek/8iSaRPt7VPfw7hcQ+OwCMqnhzNidOaQ9mVXz4byeph/gRKCdLuHElkEsbLCyhuyFyzuaVG31bOhdNoYLIIdKsDddJq633co9WsI+FSgOi+J5ci4hjLCRFkE/wv8TUq5GDgN2D5B1zEuhBB868bl9A37+dKjW8NVxp5iVXylhaBDt5ZIYBFAatYARFYX21tc1y1UqYt9h93f59J5NBCU/MMDG/j8I5uTO3cyoyS133isFoFOHx3LIj1wVFlMuleNDqrr1FF9TctuUAHT/S+O7Rr7j7oPVA
9NJ4sRI4hVV5HNNhPdB9TvUGEplExLvfFcxw6oXxR2/Y0lThD0JxcfAOMayhBZFwIhRCXwLuBeACnlqJRy0v6vLp5eyb9cupC/vX2UP2207YaKq8K/rLq1hLPZnJ3GpWphWvqeiIe3H+njHx54M/bsA3veub3Fdf0i9Xgs95BL59EXd3VwqGeYvZ2DjPiTyNNOxiLoGacQhDKTRuK/zg1tEZTUKGHWmUO9rWoHWjlD3T/lvco18td/hdEU3VBSwr2XwR8+Ev2cc16xpqhSCVysLJlstpnoblGfHZRrKOhzFzU3dMZQw5Lw/+9Y4gRBf3TDuVh4itV3Z1xDaWUiLIL5QCfwCyHEW0KIe4QQUVsjIcTtQoj1Qoj1nZ0n9ui+2981nzPm1PCVx97mSK+1KJY3qPbHD9wAmx9SO+94i+G5/wj//FZUh84H1x7kya1HeTtWp1O7RdC9X6UBAtRZQtAZI4XUZTrZb15XO+VAULK3YzD2tWpCMYI4i6feIY7HNQSpZw5Jqb6TCqtFQWVTpEVQ0aSmw4Hy1V/7Qzi+F1b/d+zjPfxx2Lc68vGuvcqa2PNsdD9/57xizZJr4axPxL72dLeZ2PuCuk43eg6ozQPEHpsai75DSuzqF1miKsaWQhrwRbegjofpN5R2JkIICoDTgZ9KKVcCg8DnnS+SUt4lpVwlpVxVX1+f7WtMifw8wXduPA1fIMgPnrOKk278JZz7T6rC9+AaaD4tcU8fl13Ri7vUYuDaygKslgRC7X6P7w/PQi5vUFaJ3SLwDYdnFzhmHrR1D/H8zg4uP0UtQjvbY5zPTmi4fBwffm+rKugqa4j9mniMdW7x0HG1u9WLaoVdCFrDAWTN/AvgjI/Cmh+7D2jp3g9bH4a1d0c+3vKSuvWUwov/E/lcrBjByZfAZV+Lfe3pbjPx8Mdg9TejHw/4oPeQSh0FmxAkGTDu3KFu65coUa2YnnnXEJh+QxlgIoSgDWiTUr5h3X8YJQyTmrl1ZVy0qIEXdnSoWEHdSXDpV+GOTfDJl+C9P0/5mC3HBjnQpXbbMYUg36P+gI9uVrtmPQtZCCtgbAlB/1H4/nJY/Q1139GC+ndrWxHAF69agidfsONoEu6B0CIdxyLoaVW7xWQCgW6MdW6xThUNCcH0cLyk52C0EABc+p8qsPynT0W7oo5sUrd7X4h8bv9L6j3n/yvsfgoOvxV+LlaMIBHpbDMxOqimoelmhHZ621S6cbVTCJK0CPTvlh60VDVrjELgSy5jSGP6DaWdrAuBlPIo0CqEsHwXXAxsy/Z1ZIILF9VztM/LrnbbCEshVEaF1Wdn/7FB3j6cXHm8tgZmVJfEX5jLG+Hg6+pnbRGAChh37lRujcf+SRUpbfmDuu8NWwSj/iC/W9fKRYsamFNbxoL6cnYmIwTJpI/2jqOYDJILSLuhd/86UFzRrB4L+JVLw81NV1wJ13xPWVHONt5aCHyDcOA19bOUsP9lmHc+nHW7ssBe+nb4PbFiBIlIp2tIp8y6CUGPrYYAVNYQJC8EHduV9aKbJlbNHJtrKBhIPkYAxjWUASYqa+jTwG+EEJuBFUAMx+zkQs8qeHFXbJP+S49u4WP3ryMYTJyZ8eKuTubWlnLJkgZ2HOmL/Z7yBrXrg7BFAMp3O3QMXv6OqlGYeZZycXRst8UIqnhmWzvHBkb44DlqZ7h4ekWSQpBEQVlvG1S57L6TJRn3kxt6N223CHyDapGXAXeLAOCkS9SCrhd7zZFNUHuyCjrvfkY91rFdfb/z3qXec86nYMdfVc0IKIsgzwMFRalde3F1+tpM6HTO4ePR/Xl0DYHTNTScrGtoZ+TY1epZ6nypjk4NpGgRlE6bmHGeU5gJEQIp5UbL/79cSnmdlHJKpAA0VZWwqLEitJN34g8E2djaQ3vfCJudRWgOvL4Aa/Z2ccHCepY0VTI4GqC1O8aCqxc7kR+5wOmA8fNfg/kXwk2/AgTseFx1cAQoquCB1w8ws6YkJGSLpl
dypNdL75CjdYaTgiJ1vFhFRP5RtQsfj0UQz/207l7lxvG6uM2iLIImddtqeSRjXVNeHsw6O2xhgdr5H9kEs89Ru//dT6nH91vxgXnvUrdnf1K5gf76f1QnWWefoWSJ12bC2wcPfTicApsIe/qwLjjUdB9QC7DOniquUr9DyVgEUqoYgc5OA2VlBUZTb48R9KcmBNVz1DnGM8LUEIGpLE4zFy6qZ93+bgZHojty7mofYGhUpWU+/fbRqOftrG/pZtgX4IJFSgggTpxA+5SrZkaa2PUL1W1xFbznJypzZuaZatfq7YP8IvZ1+1izr4tbzppNvtVddfF05dPe2Z7AKhDCmklgLdLHdsPvbg0vzLqCd6ztJcBmdbiIzbp7VGbWfZdHL4wD7WpR1tW7lVoI1qpb7Rd3Y/Y5ynLQQdO+Q2pxbDpN9XHq2qOycFpeVsfR4ltSA9f+AI5shrsuVOKRanxAE6vNxIHXVHfat36T3HHszfa6HULQc0At3jpjR4jk20z0H1EJB3aLYKwppEFfaq4h/X+XrBgaEmKEIM1csLCe0UCQNXuj/5g2tiq/5pzaUp7eFj8Q+OKuDgrz8zhnfi0LGyvIE7DtSIyFWe967W4hUC6Zpe+B634GVdaub/HVcGSj2s0VVfC7da0U5AluXBVerBdpIXDrrOrEPqVsy8NKZLb/Rd0fbw2BPj5Ep4+ODKjPcNKlKvPl7ndD67rw8/1HI4eaVDiEQO+C3Zh1jvVay3rQ8YGmFaqVAsCup5QQaGtAs+x6+PhTasfctm5sFgHEbjOhM3V2PpHccfoOh8XUaREc2x39O5OsEHRYNaARQmD9DqW6QAf8qaWP6pjGWCeiGaIwQpBmzphbQ2lhvqt76K2D3UwrK+Qj581lT8cA+zoHXI6geHFXJ2fNm0ZpYQElhfnMrSuLbm6n0a6hGscfdV6ecgctvir82GJrytie5wgWVfLwm21csqSRhorwGMmmqmIqiguSyxyyTyk7uEbd6kDreOYQaGKljx7ZBDKo8vH/v2fVzv/Bm8MZPQNWMZlGi+XxverxeGMzZ5yufPvaPXRkkypAazxFLZy1J6s0U29vtBAANK+E21fDgndD8xgT4mK1mdBCcHRzcjvvvsOqtqSsPtIiCPjUsZztTJLtN7TtTypeYi+SrB6rRZBi+qiOafQYIUgXRgjSTFFBPuctqGP1ro6owfZvtfawYlY1ly5VC/czMayCwz3D7Gof4IKF4fqJJbFmH0DYNaSLyWJwfHAUX818FTuQAfpkMccHR7nl7MjAqRAihYCx1QIi4Ff593ke2PeCWkx6WwERf/ed8Pgx0kcPvalum09XLrCrv6N2sjseV487LYLCsnA7jUTC5ClRHVztQlC3KNw5duHlltsLNcLTjfJ6+NCjcN2PE39GN2K1mejYHnZF7Xwy8XH6DqmMtZp5kRZB507lz5++PPL1pTWJLYLeNtj4IJx+W7gJHCgXZFFV6imkqaaPljeqDUKiwUuGpDFCkAEuWFRP6/Fh9h8LV+f2DvvY0zHAylnVzKwp5ZTmypjuodU7O0PH0Sy1Zh/0e6MDuG8O1uGVHrbkLYx5TX1eH+/+zmre97M1eE+6AoBDQx5m1pRw/kl1Ua9fNL2Cne39ITHrGhjhofWt/PaNg/x6TQsv7LDcFropXPtWlZVz9ifVDm/HX5VrqLwx9awZO7HSRw9vCDeNA5h/EVTOhLceCFcV2y0CCFsFsTKG7Mw+R53D51VCoJuqQdg9VLcwHHtIN25tJoJBNWxo0dWq7XNSQnBYCcG0+ZFCcHSLunW1CBIIwas/ACS8447o58aSQppq+qgQ6v/QCEHaMEKQAS4MpZGGTfvNbSo+sGK2app12dLpbDjYTUd/dBD0mW1HmVlTwskNYf/ykiblt3e6a7oGRvjUnw+zdOQXPDsQ2yL43dqD9Az52Ha4l8+9rRbCtmEPt5w1O2IEp2bR9Er6vX4O93oZ8Qf40L1rufPhzXzx0S18+b
G3+dgv13G01xtuCqf96Wd/Uu0+3350/DUEYEsfdbEI7G6XvHxYeSvsfV65PHxD0YPPtRAkE/+PUYMAACAASURBVLOYdY7aMe9+SgVG7UIw+zzVoO2kS1L/PMmiYxr23XXvQfW5GhbDoitV1pJbxpTGP6LcS5UzlEur71DYdXZ0i/q/qz0p8j3aNaRTQI9uUVXJugfTQAds+CWc9n73/9vqWWHXUMd2+Nn5cHhj/M+aavooqICxcQ2lDSMEGWDWtFJOaijnjxsOhXbUbx3sQQg4bZYSgkuXNiIlPLc9MiDY7/Xx6p4urjhlOsLWkmLx9OghOMGg5N/+sInuIR/VZcUxs4p8gSC/eLWFc+ZP476PnMnTPU0clA0coY4bz3DP6FlsCxh/95ldbDvSx/++fwVvfPFi/vip85AS/rb1SHiU5MHX1YJTPVs1cdv3ouq6Op5AMaj2FM4U1cFjKiA544zI1674ACDh5e+q+06LwCrqS9oiAHjjLnVrF4KCQviHV+HdX072U6SO/mw6uA2q0yeolg6LrlIWw97nYx9Dt93WriFkOMB6dLOKeTiDtKW1qs5Cpxe/9G1Vja4zs9b8SAnkO//V/ZxVM5Vg9bfDb25U59FptrFI1TUEKk7QbbKG0oURggxx+/nz2XKoN7TQb2zt4aT6ciqLlQm8pKmCmTUlUWmkL+zsZDQQ5PJlkYtYU1UxVSWeiMyhu17ex+qdnXz5mqWcu6CWbTGE4PHNRzjS6+UT58/n/JPruf+j5/B++Q02Lfo/NFS6B00XNioheOD1g9z10j5uOWs271kxg8bKYk6fXcPCxnKe2Ho0HCxufUPl34MSAhlQu9HxWgRChOMQmkMb1K1TCGrmwrwLVE8giG0RJCMEZXVqt3zgFXXf6UKpbI49bS4dVM9WwmovbAv19lmkvuuSmvjuIV1DUNkczg7q3q9cZ0c3u8+9sPcbCgZUk73m05Ub5q4LYe09cMr1atqeG1WzVBD9geuVi8lTpgL08Uil+6imeo4SK9OFNC0YIcgQ7z19BnNqS/nuM7sIBiVvHexmpeUWAhWQvXp5Ey/tPsaBrnAs4amtR6krL+L02TURxxNCsKSpgu1H+vAHgtzz8j6+9dROrjp1Oh88ezZLmypp6x6mzxFDkFJy98v7mF+veiEBnD2/lsc/dy3fuPmsmNdfVeKhuaqY53d0MLe2jC9fEzlr+cplTaxrOY5XFKlFou9QeBc9/dSwy2G8FgFED6c59KbK4rHv0jWn36ayicAlRtCc2jXpzzNtQdQAn4wjBMw+V2Vi6YBx5w7lMiqpVi2sT75cua4C0TUrgE0IZoQzyo7vV+4mb29iITj8lmrlcN4/wSeeV8/5hlRfpVjoFNKObfC+XyirI1bnU00gxYIyCGcOmRTStGCEIEN48vP453efzLYjfdz98j66h3ysmBW5uH/sHfPIzxP8dLX6Q/H6Aryws4PLTmkMFXfZWdJUyY6jfVz3k1f5r8e3c+HCer55w3KEEKGxmTsctQZr9nXx9uE+PnH+/IhYQE1ZIcWe+Lnbi5sqyc8TfO/mFZQWRv6hXnVqk0qVH5DhLpvaIhBCWQWQHiEoKIkMFh/eoPLX3XL0F18THl7itAiWXQ+XfT2yGjYeup7ATXCywexzlHtH+8I7tkfm7S+6Uu2IddquE91eorJZWTiF5arnUChQvDz6PfZ+Q3ueA4QKxNedDJ94QbnE4g1YalymRPrK/4FFV6gNQSIhSLWyGMK1BCZOkBaMEGSQ96xoZn5dGd96SnVptFsEAI2Vxdy8ahaPbGjjUM8wr+w+xtBogMtPme52OJY2VeL1BTnaO8KPP3A693x4lc3VpIRgm6Oh3T0v76e2rJD3rkw9hfPOKxZx74dXsWJWddRzCxvLmV9fxt5ua4CNp0wtAprTb1M71lmxrY6k8ZSoRU1K9c8ZKI54bTGs/KCa+FbsuO6yOrW7TdQOXDPnPHXbvHLs1z4e9PkPrAlnDNmF4KRL1G
d89fvu7+87rLrLFlWoz1wzT7mGjm4BhBqG5MTegXTvc+qza3EoKo8vAqAC2Z9vDc9bqJ2vqptHYtfMpFxZDOHqYmMRpAUjBBmkID+POy45GX9QUlqYH/K72/n7CxcgJdz14l7+9vZRKooLOHd+revx/u60Zr7+3mU8968XcPXypohgcmNlEdPKCtluswja+7w8v6ODW8+enXD378bi6ZVcuMh9joAQgquWNdHSZ7ktZq6KnLhVPRtufSi8iIyHU9+nfNVr71I7wKEuVfQVi0v+L3zq9eQX/BgEa+bzxGk/pH3hreM6zpipX6IW+oOvRWYMWXjzSnik9CY1FMctIKtrCDTT5irX0JHNaodfGDUPKiwEx/epupAF7079uu2WmnYRunU/1YzFIiipVnULxiJIC0YIMsw1y5tZPL2Cs+ZNc3X3zKgu4YbTZ/LgulaeevsolyxppLDA/b+l2JPPrWfPoao0evcUiiHYis50wdrfndYc9fp0cOWp0xkKWlO+tD89E7zrTpU7/7cvwCvfU485A8V28j3h+gIbI/4Aq3d2MDyaxBhOlFvtU2/U8octSXbjTDd5eep7PbDGljEUFoLtR/r44uHz6C9sgGf+I7r4TNcQaKbNVwvnkU3u8QGAwnJkfiGjmx9RAf+TLh7fZwgJQRz30FhiBKCsAmMRpAUjBBkmP0/w+0+eyw9vie1e+NRFCwgEJf1ef2hC2FhY2lTJjqP9+AMqWPr0tnbm1qpU1kywtKmS4lLr2GkWgkM9wwzoxn15eXD9z9Ui+Ob9qrVBIheFCz9+fg8f+cU6zvr6s3zx0S1sao3f0/6PG5SPfV9nEmM7M8Xsc6Frt+prBBFCsP/YICMU8kjVh1XcZNtjke91CkHNPJX62dcWWwiEoE9UUti7H1lYoZoUjgdd7d61J/ZrxpI+CipgbCyCtGCEIAtUlXioKI7tA51TW8Z1K2ZQXlQQagU9FpY0VTLqD7L/2CD9Xh9r9h7jMkc9QjoRQjBt/kr2Bxs54/5ezv+f5/nQvW9EZS6lQr/Xx3/9dRvv+p8XuOjbq/nLpsOqFqOoAm55ULkumldG+ZS7B0cjKrmdDI36+dXrBzhr3jQuPaWRP25o4z0/fpWbfr6Gl3Z1RrUDGR4NqDoJYG+cnlB2fIEgr+05RuvxOPMZUkXHCTY9GM4YstCf977+s5VAPPefqjgL1G3/0cj2HvYGczGEQEpJZ0C5jEZnvSN1372TwjKVrdUVwzUUDKoakfzC1I+tLYJU5x8YojBCcILwX9ct48k7zo/KzkmFUMD4SB+rd3biC0guWzp2CyMZLrv2Vp6++EmuO3sRy2dW8/LuYzy8PsWmYxZ/23qEi7/zIve+up/rV86gsbKITz/4Fh/5xToO9QyrHeAnXoAb7ol6778/tpVrf/SKa/tvgIfWtdIz5ONzVyzmuzetYO2XLuEr1yzlYNcQt923lut+/KqqlLZ4ettRBkcDLGmqZG/nYJRQ2HnrYDf/+tBGzvjaM3zgnjf44qNbkv7MHX3emD2nANXxtKBYxUUc2U7aUjnYM8rg+V9S7pdND6onB9oBGW0RaNwyhoCDx4fo8CshaJ2WJiuvdkFsi2D/ajXJbfbZqR+3Zi4ERtIz0jPHMUJwglBSmM+saeMrUFpQX05hfh7bjvTx9LZ2assKWemoR0g3NWWFfPKCBXz5mqX8+AOns3J2NQ+8fiCpCWx2jvZ6+cffvkV9RRGPfuodfOvG0/jTp97BV65ZyvqW43z6t1YRWc2cqCK14dEAz2/voN/r548bokXIHwhyzyv7WTWnhjPmqO+jstjDx945jxfvvJBvXH8qu9oHuPORzaEF/48bDjGjuoT3nzmLgRE/7X0jUccFGBjxc9u9a3l2WzuXLp3OWXOnuTbr+9ZTO/jA3a9HPf7zl/bxiV+tj21FFBTCjFXq5/rIWo59xwYpLVRJAFvLzoOGU2Dt3SpWoEdU2i2CqpmqKWD59HCjQgdv7D/OcVRSw5
sFaRolXrsgdoxgw69UYZzuipsKobkExj00XowQTCEKC/I4qaGcza29rN7RwSVL3OsRMslt585h37FBXt17LKX3PbKhjUBQ8uMPnB5KVy3Iz+Nj75zHnVcsZsPBHta3uAdtX9zVwbAvQFWJh/tfa4kSoSe2HqWte5jb3xXdi6moIJ9bzprNF69azEu7Ovn9ulY6+r28vLuT61Y2h/o9xXIP/XFDG/0jfn718bP5zk2ncdHiBjr6R6LcY6/sPsaafV1RTQN1nOKJLUdifzlzzlW3NosgGJS0HBvk4iXK4tt2tB9WfVRVDB/eEK4hqLA1xcvLV8Hb5hUxT/XGvuPsKFjMek7hzf7otOExMW2Bsmic7a2HjqtuscvfP7bGhGYuQdowQjDFWNpcqRacET+XjSPwPFauOrWJaWWF/HpN8n+cwaDkofWtnD1vGnProlMab1w1k+pSDz9/yd3P/MSWo0wrK+Tfr17C3s5BXtkTFiEpJXe9tJf59WVcsiT293Hr2XM4b0Et//X4dn66ei9BCe9dOZMFcYRASskvX2vhtFnVIfFaUK+uf29H+PXBoGR3xwBSwua2cJ2HPxBkq1X38Xg8ITjpEkCoFF2L9n4vw74AZ8+bRl15IdsO98Hym1TLj/W/iGwvYefmX8PV3415qrUtXeyefxvfnfFddrYnFxtJSKwU0s2/V8Hr0z80tuPqViHGIhg3RgimGDpOUOLJ5x0u7aUzTVFBPjefOYtnt7crv34SvLH/OAe6hrj5TPcq5NLCAm47Zw7Pbm+PWpC9vgDP7+jgsqWNXLuimbryIu5/rSX0/NPb2tl6qI/bHZXVTvLyBP/zvuVIKfnFqy0sn1nFSQ3lNFQUUV5UELGwa17Zc4y9nYN85Lzw2MuwcIQD14d6hkMjSjfaMpV2tQ/g9QVZNqOSzW29HOyK4R6afQ7cuS8iwKvjA/Pry8KzKoqrVPX01kegc7uqyC6p4WP3r+MHz+1Wb6w7OTytzsHhnmFajw9z9jw1FW93e3/KLj5XtBDYK4ylVG6hGWeMKQMMUMWD5dPHZxH43V1+uYYRgimGbjVxwcL6MRWRpYNbrUE3v30juT/QP6xvpaKogCuXxe7tf9t5cynMz+OelyN3la/sPsbAiJ8rlk2nqCCfW8+ezfM7Oth/bJBfrWnhH3+zgYWN5VyXRGX1zJpS/v0aVW17vfV6IQQL6svY42IR3P9qC3XlhVx1avi6Z08rxZMvIgRrlzX7OT9P8NbBsBDo1uSfv0L5/uNaBY7CvH1WxtD8unKWNlWy6+gAvkAQVn1MFZ5t+j1UNjNktS350fN7EmYzrd2vXDdnzZvGoukVDI0GkhbzuNTMVW0n7AHjQxtUP6KV0daAlJI1e7v45K/X89W/vB1Kh3Y/9jhSSFvXwf+bq1KScxwjBFOMZTMqmVFdwk1njmNg/DiZWVPKuxc38vt1rYz44xdv9Xl9PLH1CH+3opmSwtjCVVdexA1nzOSRDYfo7A/v4p7cepTK4gLOW6Csn1vPno0nX/DBe97gK4+9zQUL63n4H85LWhTff+YsHvj42dx6TuQuf29HZGrqwa4hnt/ZwQfOmk1RQfjYnvw85tSWsafDLgTq54sWNbCxtScUkN7U1ktlcQHvOKmW02ZVR8UJ4i2A+ztVoLixsoilzZWMBoJKfJpPVxlBgRGobGaP5ZIaDQT59tM74372N/Z3UVFcwJKmylAVvBYxgDV7u/jY/evw+pIryAtRUKjcOPaA8Vu/Um6sZTdEvPSlXZ1c88NXuOXu13ltbxe/eLWFf/jNhtjnrJ6jBKXlleiCunhICU99UYnmE3eqautM8MJ/w10XqU6uJzBGCKYYFcUeXv38u3n34uzHB+x8+Lw5HBsY5aEEqaR/3ngYry/IzasSN6f7xPnz8QWCfO/ZXfgCQUb9QZ7ZdpRLloarsRsqi7lmeTOHeob5p4tO4u7bwv2YkkEIwTtPrsOTH/7TWFBfztE+b7jADfjVmhbyhY
gQDM1J9eURFsHu9n6mVxbzroV1HBsYCe2yN7f1sHxmNUIIrjm1iS2HejnQpVJVv/HkdlZ+7ZmYO/J9xwaYV1cW0XBw2+E+1VZj1UfViypnhEToymXTeWzjYba09boeD1Sg+My5qgJ+YaNyce20CcF9r+7n+R0d/OmtQ1HvPTaQwMUyzZZC2nMQtjwMS6+L6OrqDwT59INv0e/1843rT2Xdly7hq9eewrPb27nt3rX0DrvUpyy/GfyjcP/V8IMVan5CvL5Gmu1/gba1cPFXVG3KHz4cHvIT8MGOJ9Sx/vxp+NV16nbLw2rOgh1vn6r63vu8Gtpjp+8IvPq/Kni//S+Rzx3ZBE98NrlrzQJGCAwZ4Z0n1XHm3Bp+8NzumC0dvL4AD649yOLpFSyfWZXwmPPqynj/mbP47RsHueL7L/H9Z3fR5/VHuZS+dt0y/vrpd/KZyxfFjQsky4J6tSjusxZ3ry/AQ+tbuWLZdBpd5jksaCjjYNeQctUAuzr6WTi9gtNmqoDyxtYevL4AO4/2hz73laeqRoN/3niYz/xhMz9/cR/9Xj+v7nbPvtp/bJB5VmB9Xl0ZRQV54cFEp96oJqg1LGZXez+FBXl84/pTmVZWyH8/sd21JqKj38u+Y4OcPU+5oCqKPcyoLmGXlQo7OOLnJWvi3j2v7I+IHTz8Zhtnfv1Zth6KFpmDXUP8ak0Lz3VWMnhkFy9ua4M/fES5ii64M+K1m9p66R32cecVi7jlLNUf68PnzeWHt6zkrdZu/r9frouOWZx8CXxmJ7z3LmV1PP81+OEZ8NZvYheaBXzw7P9VRXjn3QHvu0/FGR77R3j5O/D95fC7W9Sxdj7JQE8nwbf/BI98HL6zEP6zDv6zFr5aA9+cBT85G379XjWNzZ4Z9cr31LkqZyhB0N97MAB/+pTqnfWHj4SLACcQIwSGjCCE4M4rFtPZP8IvXtsf8dzQqJ97Xt7H+f/zAm8f7uMj581Nuvr5v997Knfftgop4Ser91JWmM/5J0cGxcuLClg2I7GwJMtJDVYmkCUET29rp8/r5wNnuQ+4WVBfjj8oOdA1SCAo2d0+wMKGcpY0VVJYkMem1h62HenDH5ShiXUza0pZMaua7z67i0c2tP3/7Z15XJVV/sffX/Z9uQKi7CjiLqKCSppbqWVZZpvaNG1TTYvOr0ZraqapV1Mzr+lXTTPtNf0qm2yybN/dzXIjNUwRBNwRVAQBBYHz++N57oUri4hcbt173q8XL+7zcO59zuHA83nO93wX5k1KITzAm/UtuMzW1jWw92g1yaYQeHl6kBod3FiYyDcY5m6GUXex89BxekUGERbgw90TevNdwRGW/LC/mdnJuj+Q2SThYZ/uQTbPoeW5JdTUNXBtRjz5JZWs2Gk8/ZZXnzLFBT7Zam/ayi+pZNJTK/nTh9vYeqIbgZwg9Kt5RvbY6f+yj3TGMAt5iPEQ0ZRpg3vyl8sGsaGojA82N1+N4BMIQ66G6z+Gm7424iU+/C38cyi8eL5xg351shFjcbLC8Ko6ugsmPWwkSkwYBRMehO0fGdHZkX3g2kVw/3723/Ijgw/+gSuCFlJ30zK44BEjg23WXKNK2wWPwBWvGmJSfQQ+mWfGcRww9h7SZsHYe41VQZFZ5OiHN40a3wMuh/yv4eN5LZu1qo7A1v/CibZToXQGHQ9j1WjOwIhECxP7RvHCil3MzjCS5a3ILeHed7dyuLKG0b268c9rhzKylWyrLSEiXNC/O+NSI3l34z6C/LwcvikebwnE00Nsdv/Fm/YRE+bfar+tuZ3yS6rw8vCgpq6BPt2D8fHyYEDPEDbvPUbPMH8A2yoB4IphsWzZd4xHLxvInJEJbDtQwYYWhGDP0SoaFCRHNuaQ6t8jhC+3FaOUMkTVzxDCvEOVDE80guhmZSbw5ve7+Z//buGhD7eRmdyNmDA/yqpPkbO/nAAfTwb0bDTV9IkO5tv8I9TVN/D5j8VEBPnw0CX9WZFbwsurCpnQtz
tPfp3LsepakiMD+SLnIAumpNpE3Rob8uW8saRW+sLCV0g79jVk3Ar9pzcb16q8UgbHhhEW0DzdxMxhsby1bjd//XwHkwdEE+jbyq0rLsMQg5zFhikHZaw+Kg7AZ/caKwHxgMQx0Gdy4/uy5hkV7Hqk2aXn/s+KHTQo+GF/Ja8VxnHL2LktXxcMk9c3f4YtiwyxU/Uw9vdG8N7yx2DtM0Zti2WPGrUuZr4GEamw8q+GeMdlQE2F0dddy4zsryi48nUYcFnr1+0EtBBoHMq9k1O56JnV/Gt5Ht6eHjy3Yhd9o4N5YU46wxM7nqLa29ODWZntKDnZCfh4eZBgCWBXSRXF5SdZk1fKneN7t2p2st6gd5VWYm2SYtrc0+LCeHv9HqJD/YkK9iU6tNG0NCcznosGRtMtyAiuyki08PVPhyipOGlXUtTqOprUJOaif88QFm3YS3HFSXqEGiJz/OQp9h87wazu8bZxLLkji1U7S/k2/whrdx1mfeERwgN9CPP35ophve32RlK7B1Nb38CO4uMszy3h8qEx+Hl7ckNWIo99toP/btzLm9/vZs7IBFKjg3lgSQ47D1WSGh1MQ4Pio80HGJMSQWp0MJQZLqRbG5JIHPsnTq/3Vl59ii17j3HnhJQWf6ceHsJDlw5gxnNreXZ5PvOn9G2xndnYiKkYfJX9+X2bqP3uRU7kLuV1r19T/ul2LIE+zM6MN8QnbZZd85q6et7ZsJdJ/aIA4Ymvcrmgf3cSIwI5XFnD3z7fQVp8GLMzzX2i0XfDzq8M2399DaTNbqyklnmrIQAf3G6UcJ31jrGfM+4+o17DuueNLwDESLM+7j5IudAQJwejhUDjUPr1CGH6kJ68vNowD12bEcdDlwxwmmtrR+kVZWwAv5e9jwZlPL23RpCvF9EhfuwqqbTZ41NML5y0uDBe+7aIr7YVMybFPsGgiNhEAAw3ToD1RUeZNrgxMMyabK5p8J11w3j7wQqbEOSZK5imdTBC/LyZNrin3ee1hvV9L68uoLq23rYXc01GPM8szWf+4q10C/ThngtSqamv58EPcvgip5jU6GA2FB1l/7ETzJ9iRkOHJZCX8Si3rArj7wdOMLaPfUbcNfmHaVAwNqX12Jf0+HBmDI3hldWFXDMinvhuradkKSitZNGGvcyfnIqXVdxih/Fu/AM8sGk6kbt9qd65h6raej7ecoCFN2cSEWQf3fxFTjGHK2u5blQifaODmfTkSha8t5UbshJ5YEkOR6pq2bS7rFEIPDzh8hfg+SzD1DP23sYPG34TrH4KdnwCQ65tTKMuApc8Axm/MTKw+oYYiQVbqhXhQPQegcbh3HNhKsMSwnnq6iE8PmPwL04EwLD7Fx2p4t2Ne8lIspDQre1/1N6mcOw8VElMmD9BpiljqFmutKaugbS4tvcxBvQMIcDH02a/t1JQWkVEkA+h/o3eUNayohuLGou555keP1YPoLOld1QQIvDxlgOEB3iTmWwIU4ifty34b8GUvoQGeBMV7MfwhHC+2FYMwAeb9xPg48kF1qSHIkRPuI1SsZC9p3nB+VU7Swn282qxGl5TFkzti5en8KePcqhvI9jtsc928NKqAjYU2V9rTd5hokP8WP+HiWx7ZApv3pRB0ZEqrnrxO7ukgwBvfrebxG4BjOkdQfcQPx68uB/rCo9y28JseoT5cc2IOAoOV9m5MxOeALPfNfYMwpqsWAMsMOJG8Ak2PJWaImIEC0b1M4L9ulgEQAuBpguIswTw3u2juXyo82IbzpVekYGcqlcUHalmZhurgabtd5VWsfPQcbsbcZzFH0ugYQMfHNv2Tc/L04P0+PBmQlB4uIrkCPube5CvF5lJFr4w9wkAcosr8fP2IC68Y8kM/bw9SewWSIOCC/tH25mN7p6YwhNXDrH7XUweEM32gxXkHTrOp1sPMnlAtF023WA/b/p0D2bTbvubs1KKVXmlZPWKaHx6b4XuIX4smNKXFbmlPLDkxxY9oHKLj/PNds
PNc3luo0tnfYNi7a4jnJcSYdvHGJMSyRs3ZlJSUcOVL67lB1OkfjpQwcbdZcwZmWAzAV41PI5fj07kd5P68P7tWVxlimGzfZyEUdD/0uadn/hnmLuledqPnwFaCDSadmBNHeHv7WkXSdwavaOCqKypI/fQcTvTjIjYnnrb4zKbkWQh99BxOx/6giauo02ZOjCagtIq26Z2XslxUqKCz8mF1ipiUwbZ19EO9fdm5rBYu8+21tr+w5IfqThZ12I097CEcDbvOWbnBppfUsnB8pPtrsVx/ehE7hzfm0Ub9vLop83dYZ9fkU+AjydDYkNZtqNRCH7cb7innu5llpFk4a2bMzlRW8/lz63lulfX8fcvd+Dr5WEndCLCny8dwNxJKfh4eTCwZyh+3h7NhLpVPL0gsP2OEV2J04RARDxF5AcR+cRZfdBo2os1lmDqoGibmac97ZVq3B+wcm1GPNePSmjRO+Z0RiRaUAo27TZuNhUnT3G4soakyOZCMHlANCJGtDUYT8YpHTQLWRmZ3I2YMH+yep05b1WcJYCBMSFsKCojIsiXrF7Nb3rp8eEcr6mz7V8ArDTjE8b2aX9urHsu7MOvRyfy6ppCnvgq1yYGe49W8/HWg8zKiGd6Wgz5JZW2HE5r8ozrtJSDa0hcGCt/P577p/blpwMVLM8tZXpazzbnyMfLWLG15Nl1Lhws74S0HmeJM1cEc4HtTry+RtNuQv29eWFOOve15a3ShF5NyoOmniYEF/TvzsPTB7brc4bGh+HtKawvLEMpxbPLjejc1OjgZm2jQvwYFh/O5znFlFefouR4TbNrny03ZCWxev74Vuton84Uc1VwyZAeLZp5rPUgmpqHVucdJjkykNizMGGJCH+a1p+rh8fx7PJd3PLGJo5V1/Liql14CNw8JpmJ/YyaC8t2HLJdp1+PkGabwlYCfb249fxerF4wnievGsKCdsz1iEQLPx2sOKeqfE35aMsBRj2+jIXfd21GVad4DYlILHAx8BfgIGM9KAAACpNJREFUf5zRB43mbJnSRlK804kK9iXY14vK2rpzqhnt5+3JoJhQ1hce4bHPtvPy6kJmZ8ZzfkrLZpQpA6N59NPtfG3ayPucoxAAZ2Vamp4WwydbD9oSD55OQrcALIE+ZO8pY1ZmPBuLjrIm/zA3ZiV2qF9/vWIQqdHBPP75di5+Zg2llTXMHBZrc8tNjgxk6Y4SrhweR/aeMm7MSjrDpxrZbmekt28/KzPJumIrY3yqITz/WpbHW+v22NqEBfgwMtnC6F4RjEy2tFq21poyHeDhj7fRv2cI6Q4uLGXFWSuCp4H5QKtZtUTkNyKyUUQ2lpaWdl3PNJpOQERIjgoiLjygzWR67SEjqRvZe47x8upCrh+VwKOXDWz15jxloPFEbl059Glh5eBI4iwBfDFvLL2jWr6uiJAeH0727jKOVtVy539+IDbcn7smthw/cCZEhBvPS2LxbaMRMTaEbx3by/bziX2jWFdwlOW5JZyqV5zXhntqRxgaH46Xh9j2CfaVVfOPpXlEhfgxJiWCMSkRdAv04e31e7jljY1MenJlq6uHjbvLyNlfwe8npxId6sdvF2bbeyQ5kC5fEYjINKBEKbVJRMa11k4p9RLwEsDw4cM7ISm6RtO1/G5Sytln6myBsSkRvLByFzdmJfHHaf3aTMcRGx7AoJhQftxfTpCvFz1Dm+dCcjbDEsL5Zvshblu4iaNVtbz/29FnlRiwJYbEhfH53DEcLD9pF18xvm8UL68u5MmvduLj5cGIcwhibAl/H08GxYaywRSCp7/JQ0R4YU66LZ4DjOC0ZdtLuP2tbN5YW9Ri4Ny/1xQS6u/NjVlJjEuNZMZza7nr7WwW3pR5Rm+qc8UZK4Is4FIRKQIWARNEZKET+qHROJRxqVFnZU5qjdG9I1g9f/wZRcCKdVVgxAF0banS9pAeb3hNrS88yh+n9eu0vFBW99SmjEi0EOzrRcHhKjISLQ6JYclItLB1Xz
k5+8t5P3sfvxqZYCcCYBRsmjqoBxP7RvHKmkK7TLZgbHJ/ua2YWZnx+Pt4MqBnKI/PGMT3BUdtm/+OpMuFQCl1v1IqVimVCFwDLFNKzenqfmg0vyTiLAHtvqlPNYXgXDeKHcXg2DACfTyZNrgHc1pI492ZeHt62NxSO9ssZCUjyUJtfQN3/Ccbf29Pbh/Xq9W2d01M4Vj1qWalXN/4rggR4VejGn8fM9JjWXzbKKYNPveHiTOh4wg0GhcjOTKIBVP6Ovwm21H8fTxZes84nr46rUtWLJMHGm6141LbF6dwtgxPsCACu49Uc/OYZLs0IaeTFhfG+X0ieWV1AdW1xqqgqqaORRv2ctGgHs1WEsMTLV3yO3JqriGl1ApghTP7oNG4Im09lf4ciO7CvYtLBvdgcEyo3d5BZxIa4E1q92CKK05y85gzeyXdPTGFK55fy0urCgjy9eK1b4s4frKuQ55TnYVOOqfRaFwaEXGYCFj5+8whnGpoaNU1tCnDEsI5r3cET3+TBxh7DI9ePpChXeQq2hJaCDQajeYcGdSOdCFNeeiS/iz8fjcz0mNtxYmciRYCjUaj6WJSuge3O7q8K9CbxRqNRuPmaCHQaDQaN0cLgUaj0bg5Wgg0Go3GzdFCoNFoNG6OFgKNRqNxc7QQaDQajZujhUCj0WjcHDm98PPPEREpBTpauy0CONyJ3fml4I7jdscxg3uO2x3HDGc/7gSl1Bmz7f0ihOBcEJGNSqnhzu5HV+OO43bHMYN7jtsdxwyOG7c2DWk0Go2bo4VAo9Fo3Bx3EIKXnN0BJ+GO43bHMYN7jtsdxwwOGrfL7xFoNBqNpm3cYUWg0Wg0mjbQQqDRaDRujksLgYhMEZFcEckXkfuc3R9HICJxIrJcRLaLyDYRmWuet4jI1yKSZ353Xh08ByEiniLyg4h8Yh4nicg6c8zviIiPs/vY2YhImIgsFpEd5pyPcvW5FpHfmX/bOSLytoj4ueJci8i/RaRERHKanGtxbsXgGfPetlVE0s/l2i4rBCLiCTwLTAX6A9eKSH/n9soh1AH3KKX6ASOBO8xx3gcsVUqlAEvNY1djLrC9yfHfgKfMMZcBNzmlV47lH8AXSqm+wBCM8bvsXItIDHA3MFwpNRDwBK7BNef6/4App51rbW6nAinm12+A58/lwi4rBEAGkK+UKlBK1QKLgOlO7lOno5Q6qJTKNl8fx7gxxGCM9XWz2evAZc7poWMQkVjgYuAV81iACcBis4krjjkEGAu8CqCUqlVKHcPF5xqjpK6/iHgBAcBBXHCulVKrgKOnnW5tbqcDbyiD74EwEenR0Wu7shDEAHubHO8zz7ksIpIIDAXWAd2VUgfBEAsgynk9cwhPA/OBBvO4G3BMKVVnHrvifCcDpcBrpknsFREJxIXnWim1H3gC2IMhAOXAJlx/rq20Nreden9zZSGQFs65rK+siAQB7wHzlFIVzu6PIxGRaUCJUmpT09MtNHW1+fYC0oHnlVJDgSpcyAzUEqZNfDqQBPQEAjHMIqfjanN9Jjr1792VhWAfENfkOBY44KS+OBQR8cYQgbeUUu+bpw9Zl4rm9xJn9c8BZAGXikgRhslvAsYKIcw0H4Brzvc+YJ9Sap15vBhDGFx5ricBhUqpUqXUKeB9YDSuP9dWWpvbTr2/ubIQbABSTO8CH4wNpo+c3KdOx7SNvwpsV0o92eRHHwHXm6+vBz7s6r45CqXU/UqpWKVUIsa8LlNKzQaWAzPNZi41ZgClVDGwV0RSzVMTgZ9w4bnGMAmNFJEA82/dOmaXnusmtDa3HwG/Mr2HRgLlVhNSh1BKuewXcBGwE9gFPODs/jhojOdhLAm3ApvNr4swbOZLgTzzu8XZfXXQ+McBn5ivk4H1QD7wLuDr7P45YLxpwEZzvj8Awl19roGHgR1ADvAm4OuKcw28jbEPcgrjif+m1uYWwzT0rHlv+xHDq6rD19YpJjQajcbNcWXTkEaj0WjagRYCjUajcX
O0EGg0Go2bo4VAo9Fo3BwtBBqNRuPmaCHQaByMiIyzZkjVaH6OaCHQaDQaN0cLgUZjIiJzRGS9iGwWkRfNegeVIvK/IpItIktFJNJsmyYi35u54Jc0yRPfW0S+EZEt5nt6mR8f1KSOwFtmlKxG87NAC4FGA4hIP+BqIEsplQbUA7MxkpxlK6XSgZXAQ+Zb3gAWKKUGY0R2Ws+/BTyrlBqCkRPHGvY/FJiHURsjGSNfkkbzs8DrzE00GrdgIjAM2GA+rPtjJPhqAN4x2ywE3heRUCBMKbXSPP868K6IBAMxSqklAEqpkwDm561XSu0zjzcDicAaxw9LozkzWgg0GgMBXldK3W93UuSPp7VrKydLW+aemiav69H/e5qfEdo0pNEYLAVmikgU2GrFJmD8j1izXM4C1iilyoEyERljnr8OWKmMOhD7ROQy8zN8RSSgS0eh0XQA/VSi0QBKqZ9E5EHgKxHxwMgAeQdG8ZcBIrIJozrW1eZbrgdeMG/0BcAN5vnrgBdF5BHzM67swmFoNB1CZx/VaNpARCqVUkHO7odG40i0aUij0WjcHL0i0Gg0GjdHrwg0Go3GzdFCoNFoNG6OFgKNRqNxc7QQaDQajZujhUCj0WjcnP8H7qcEV4sozUEAAAAASUVORK5CYII=\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"experimento_ssd7_panel_cell.h5\n"
]
}
],
"source": [
"#Graficar aprendizaje\n",
"\n",
    "history_path = config['train']['saved_weights_name'].rsplit('.', 1)[0] + '_history'\n",
"\n",
"hist_load = np.load(history_path + '.npy',allow_pickle=True).item()\n",
"\n",
"print(hist_load.keys())\n",
"\n",
"# summarize history for loss\n",
"plt.plot(hist_load['loss'])\n",
"plt.plot(hist_load['val_loss'])\n",
"plt.title('model loss')\n",
"plt.ylabel('loss')\n",
"plt.xlabel('epoch')\n",
"plt.legend(['train', 'test'], loc='upper left')\n",
"plt.show()\n",
"\n",
"print(config['train']['saved_weights_name'])"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Evaluación del Modelo"
]
},
{
"cell_type": "code",
"execution_count": 25,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Processing image set 'train.txt': 100%|██████████| 1/1 [00:00<00:00, 20.74it/s]\n",
"Processing image set 'test.txt': 100%|██████████| 1/1 [00:00<00:00, 25.40it/s]\n",
"Number of images in the evaluation dataset: 1\n",
"\n",
"Producing predictions batch-wise: 100%|██████████| 1/1 [00:00<00:00, 1.50it/s]\n",
"Matching predictions to ground truth, class 1/1.: 100%|██████████| 200/200 [00:00<00:00, 7283.80it/s]\n",
"Computing precisions and recalls, class 1/1\n",
"Computing average precision, class 1/1\n",
"200 instances of class panel with average precision: 0.8982\n",
"mAP using the weighted average of precisions among classes: 0.8982\n",
"mAP: 0.8982\n",
"panel AP 0.898\n",
"\n",
" mAP 0.898\n"
]
}
],
"source": [
"\n",
"config_path = 'config_7_panel.json'\n",
"\n",
"with open(config_path) as config_buffer:\n",
" config = json.loads(config_buffer.read())\n",
"\n",
" \n",
"model_mode = 'training'\n",
"# TODO: Set the path to the `.h5` file of the model to be loaded.\n",
"model_path = config['train']['saved_weights_name']\n",
"\n",
"# We need to create an SSDLoss object in order to pass that to the model loader.\n",
"ssd_loss = SSDLoss(neg_pos_ratio=3, alpha=1.0)\n",
"\n",
"K.clear_session() # Clear previous models from memory.\n",
"\n",
"model = load_model(model_path, custom_objects={'AnchorBoxes': AnchorBoxes,\n",
" 'L2Normalization': L2Normalization,\n",
" 'DecodeDetections': DecodeDetections,\n",
" 'compute_loss': ssd_loss.compute_loss})\n",
"\n",
"\n",
" \n",
"train_dataset = DataGenerator(load_images_into_memory=False, hdf5_dataset_path=None)\n",
"val_dataset = DataGenerator(load_images_into_memory=False, hdf5_dataset_path=None)\n",
"\n",
"# 2: Parse the image and label lists for the training and validation datasets. This can take a while.\n",
"\n",
"\n",
"\n",
    "# The XML parser needs to know what object class names to look for and in which order to map them to integers.\n",
"classes = ['background' ] + labels\n",
"\n",
"train_dataset.parse_xml(images_dirs= [config['train']['train_image_folder']],\n",
" image_set_filenames=[config['train']['train_image_set_filename']],\n",
" annotations_dirs=[config['train']['train_annot_folder']],\n",
" classes=classes,\n",
" include_classes='all',\n",
" #classes = ['background', 'panel', 'cell'], \n",
" #include_classes=classes,\n",
" exclude_truncated=False,\n",
" exclude_difficult=False,\n",
" ret=False)\n",
"\n",
"val_dataset.parse_xml(images_dirs= [config['test']['test_image_folder']],\n",
" image_set_filenames=[config['test']['test_image_set_filename']],\n",
" annotations_dirs=[config['test']['test_annot_folder']],\n",
" classes=classes,\n",
" include_classes='all',\n",
" #classes = ['background', 'panel', 'cell'], \n",
" #include_classes=classes,\n",
" exclude_truncated=False,\n",
" exclude_difficult=False,\n",
" ret=False)\n",
"\n",
"#########################\n",
"# 3: Set the batch size.\n",
"#########################\n",
"batch_size = config['train']['batch_size'] # Change the batch size if you like, or if you run into GPU memory issues.\n",
"\n",
"\n",
"\n",
"\n",
"\n",
"\n",
"\n",
"evaluator = Evaluator(model=model,\n",
" n_classes=n_classes,\n",
" data_generator=val_dataset,\n",
" model_mode='training')\n",
"\n",
"results = evaluator(img_height=img_height,\n",
" img_width=img_width,\n",
" batch_size=4,\n",
" data_generator_mode='resize',\n",
" round_confidences=False,\n",
" matching_iou_threshold=0.5,\n",
" border_pixels='include',\n",
" sorting_algorithm='quicksort',\n",
" average_precision_mode='sample',\n",
" num_recall_points=11,\n",
" ignore_neutral_boxes=True,\n",
" return_precisions=True,\n",
" return_recalls=True,\n",
" return_average_precisions=True,\n",
" verbose=True)\n",
"\n",
"mean_average_precision, average_precisions, precisions, recalls = results\n",
"total_instances = []\n",
"precisions = []\n",
"\n",
"for i in range(1, len(average_precisions)):\n",
" \n",
" print('{:.0f} instances of class'.format(len(recalls[i])),\n",
" classes[i], 'with average precision: {:.4f}'.format(average_precisions[i]))\n",
" total_instances.append(len(recalls[i]))\n",
" precisions.append(average_precisions[i])\n",
"\n",
"if sum(total_instances) == 0:\n",
" \n",
" print('No test instances found.')\n",
"\n",
"else:\n",
"\n",
" print('mAP using the weighted average of precisions among classes: {:.4f}'.format(sum([a * b for a, b in zip(total_instances, precisions)]) / sum(total_instances)))\n",
" print('mAP: {:.4f}'.format(sum(precisions) / sum(x > 0 for x in total_instances)))\n",
"\n",
" for i in range(1, len(average_precisions)):\n",
" print(\"{:<14}{:<6}{}\".format(classes[i], 'AP', round(average_precisions[i], 3)))\n",
" print()\n",
" print(\"{:<14}{:<6}{}\".format('','mAP', round(mean_average_precision, 3)))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Cargar nuevamente el modelo desde los pesos.\n",
"Predicción"
]
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"Training on: \t{'panel': 1}\n",
"\n"
]
}
],
"source": [
"from imageio import imread\n",
"from keras.preprocessing import image\n",
"import time\n",
"\n",
"config_path = 'config_7_panel.json'\n",
"input_path = ['panel_jpg/Mision_1/', 'panel_jpg/Mision_2/']\n",
"output_path = 'result_ssd7_panel_cell/'\n",
"\n",
"with open(config_path) as config_buffer:\n",
" config = json.loads(config_buffer.read())\n",
"\n",
"makedirs(output_path)\n",
"###############################\n",
"# Parse the annotations\n",
"###############################\n",
"score_threshold = 0.8\n",
"score_threshold_iou = 0.3\n",
"labels = config['model']['labels']\n",
"categories = {}\n",
"#categories = {\"Razor\": 1, \"Gun\": 2, \"Knife\": 3, \"Shuriken\": 4} #la categoría 0 es la background\n",
"for i in range(len(labels)): categories[labels[i]] = i+1\n",
"print('\\nTraining on: \\t' + str(categories) + '\\n')\n",
"\n",
"img_height = config['model']['input'] # Height of the model input images\n",
"img_width = config['model']['input'] # Width of the model input images\n",
"img_channels = 3 # Number of color channels of the model input images\n",
"n_classes = len(labels) # Number of positive classes, e.g. 20 for Pascal VOC, 80 for MS COCO\n",
"classes = ['background'] + labels\n",
"\n",
"model_mode = 'training'\n",
"# TODO: Set the path to the `.h5` file of the model to be loaded.\n",
"model_path = config['train']['saved_weights_name']\n",
"\n",
"# We need to create an SSDLoss object in order to pass that to the model loader.\n",
"ssd_loss = SSDLoss(neg_pos_ratio=3, alpha=1.0)\n",
"\n",
"K.clear_session() # Clear previous models from memory.\n",
"\n",
"model = load_model(model_path, custom_objects={'AnchorBoxes': AnchorBoxes,\n",
" 'L2Normalization': L2Normalization,\n",
" 'DecodeDetections': DecodeDetections,\n",
" 'compute_loss': ssd_loss.compute_loss})\n",
"\n",
"\n",
"\n",
"\n",
"\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"image_paths = []\n",
"for inp in input_path:\n",
" if os.path.isdir(inp):\n",
" for inp_file in os.listdir(inp):\n",
" image_paths += [inp + inp_file]\n",
" else:\n",
" image_paths += [inp]\n",
"\n",
    "image_paths = [inp_file for inp_file in image_paths if (inp_file[-4:].lower() in ['.jpg', '.png', 'jpeg'])]\n",
"times = []\n",
"\n",
"\n",
"for img_path in image_paths:\n",
" orig_images = [] # Store the images here.\n",
" input_images = [] # Store resized versions of the images here.\n",
" #print(img_path)\n",
"\n",
" # preprocess image for network\n",
" orig_images.append(imread(img_path))\n",
" img = image.load_img(img_path, target_size=(img_height, img_width))\n",
" img = image.img_to_array(img)\n",
" input_images.append(img)\n",
" input_images = np.array(input_images)\n",
" # process image\n",
" start = time.time()\n",
" y_pred = model.predict(input_images)\n",
" y_pred_decoded = decode_detections(y_pred,\n",
" confidence_thresh=score_threshold,\n",
" iou_threshold=score_threshold_iou,\n",
" top_k=200,\n",
" normalize_coords=True,\n",
" img_height=img_height,\n",
" img_width=img_width)\n",
"\n",
"\n",
" #print(\"processing time: \", time.time() - start)\n",
" times.append(time.time() - start)\n",
" # correct for image scale\n",
"\n",
" # visualize detections\n",
" # Set the colors for the bounding boxes\n",
" colors = plt.cm.brg(np.linspace(0, 1, 21)).tolist()\n",
"\n",
" plt.figure(figsize=(20,12))\n",
" plt.imshow(orig_images[0],cmap = 'gray')\n",
"\n",
" current_axis = plt.gca()\n",
" #print(y_pred)\n",
" for box in y_pred_decoded[0]:\n",
" # Transform the predicted bounding boxes for the 300x300 image to the original image dimensions.\n",
"\n",
" xmin = box[2] * orig_images[0].shape[1] / img_width\n",
" ymin = box[3] * orig_images[0].shape[0] / img_height\n",
" xmax = box[4] * orig_images[0].shape[1] / img_width\n",
" ymax = box[5] * orig_images[0].shape[0] / img_height\n",
"\n",
" color = colors[int(box[0])]\n",
" label = '{}: {:.2f}'.format(classes[int(box[0])], box[1])\n",
" current_axis.add_patch(plt.Rectangle((xmin, ymin), xmax-xmin, ymax-ymin, color=color, fill=False, linewidth=2))\n",
" current_axis.text(xmin, ymin, label, size='x-large', color='white', bbox={'facecolor':color, 'alpha':1.0})\n",
"\n",
" #plt.figure(figsize=(15, 15))\n",
" #plt.axis('off')\n",
" save_path = output_path + img_path.split('/')[-1]\n",
" plt.savefig(save_path)\n",
" plt.close()\n",
" \n",
    "with open(output_path + 'time.txt', 'w') as file:\n",
    "    file.write('Tiempo promedio:' + str(np.mean(times)))\n",
"print('Tiempo Total: {:.3f}'.format(np.sum(times)))\n",
"print('Tiempo promedio por imagen: {:.3f}'.format(np.mean(times)))\n",
"print('OK')"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"panel : 69\n",
"cell : 423\n"
]
}
],
"source": [
"\n",
"# Summary instance training\n",
"category_train_list = []\n",
"for image_label in train_dataset.labels:\n",
    "    category_train_list += [i[0] for i in image_label]\n",
"summary_category_training = {train_dataset.classes[i]: category_train_list.count(i) for i in list(set(category_train_list))}\n",
"for i in summary_category_training.keys():\n",
" print(i, ': {:.0f}'.format(summary_category_training[i]))\n"
]
},
{
"cell_type": "code",
"execution_count": 28,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"__________________________________________________________________________________________________\n",
"Layer (type) Output Shape Param # Connected to \n",
"==================================================================================================\n",
"input_1 (InputLayer) (None, 400, 400, 3) 0 \n",
"__________________________________________________________________________________________________\n",
"identity_layer (Lambda) (None, 400, 400, 3) 0 input_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv1 (Conv2D) (None, 400, 400, 32) 2432 identity_layer[0][0] \n",
"__________________________________________________________________________________________________\n",
"bn1 (BatchNormalization) (None, 400, 400, 32) 128 conv1[0][0] \n",
"__________________________________________________________________________________________________\n",
"elu1 (ELU) (None, 400, 400, 32) 0 bn1[0][0] \n",
"__________________________________________________________________________________________________\n",
"pool1 (MaxPooling2D) (None, 200, 200, 32) 0 elu1[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2 (Conv2D) (None, 200, 200, 48) 13872 pool1[0][0] \n",
"__________________________________________________________________________________________________\n",
"bn2 (BatchNormalization) (None, 200, 200, 48) 192 conv2[0][0] \n",
"__________________________________________________________________________________________________\n",
"elu2 (ELU) (None, 200, 200, 48) 0 bn2[0][0] \n",
"__________________________________________________________________________________________________\n",
"pool2 (MaxPooling2D) (None, 100, 100, 48) 0 elu2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv3 (Conv2D) (None, 100, 100, 64) 27712 pool2[0][0] \n",
"__________________________________________________________________________________________________\n",
"bn3 (BatchNormalization) (None, 100, 100, 64) 256 conv3[0][0] \n",
"__________________________________________________________________________________________________\n",
"elu3 (ELU) (None, 100, 100, 64) 0 bn3[0][0] \n",
"__________________________________________________________________________________________________\n",
"pool3 (MaxPooling2D) (None, 50, 50, 64) 0 elu3[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv4 (Conv2D) (None, 50, 50, 64) 36928 pool3[0][0] \n",
"__________________________________________________________________________________________________\n",
"bn4 (BatchNormalization) (None, 50, 50, 64) 256 conv4[0][0] \n",
"__________________________________________________________________________________________________\n",
"elu4 (ELU) (None, 50, 50, 64) 0 bn4[0][0] \n",
"__________________________________________________________________________________________________\n",
"pool4 (MaxPooling2D) (None, 25, 25, 64) 0 elu4[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv5 (Conv2D) (None, 25, 25, 48) 27696 pool4[0][0] \n",
"__________________________________________________________________________________________________\n",
"bn5 (BatchNormalization) (None, 25, 25, 48) 192 conv5[0][0] \n",
"__________________________________________________________________________________________________\n",
"elu5 (ELU) (None, 25, 25, 48) 0 bn5[0][0] \n",
"__________________________________________________________________________________________________\n",
"pool5 (MaxPooling2D) (None, 12, 12, 48) 0 elu5[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv6 (Conv2D) (None, 12, 12, 48) 20784 pool5[0][0] \n",
"__________________________________________________________________________________________________\n",
"bn6 (BatchNormalization) (None, 12, 12, 48) 192 conv6[0][0] \n",
"__________________________________________________________________________________________________\n",
"elu6 (ELU) (None, 12, 12, 48) 0 bn6[0][0] \n",
"__________________________________________________________________________________________________\n",
"pool6 (MaxPooling2D) (None, 6, 6, 48) 0 elu6[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv7 (Conv2D) (None, 6, 6, 32) 13856 pool6[0][0] \n",
"__________________________________________________________________________________________________\n",
"bn7 (BatchNormalization) (None, 6, 6, 32) 128 conv7[0][0] \n",
"__________________________________________________________________________________________________\n",
"elu7 (ELU) (None, 6, 6, 32) 0 bn7[0][0] \n",
"__________________________________________________________________________________________________\n",
"classes4 (Conv2D) (None, 50, 50, 12) 6924 elu4[0][0] \n",
"__________________________________________________________________________________________________\n",
"classes5 (Conv2D) (None, 25, 25, 12) 5196 elu5[0][0] \n",
"__________________________________________________________________________________________________\n",
"classes6 (Conv2D) (None, 12, 12, 12) 5196 elu6[0][0] \n",
"__________________________________________________________________________________________________\n",
"classes7 (Conv2D) (None, 6, 6, 12) 3468 elu7[0][0] \n",
"__________________________________________________________________________________________________\n",
"boxes4 (Conv2D) (None, 50, 50, 16) 9232 elu4[0][0] \n",
"__________________________________________________________________________________________________\n",
"boxes5 (Conv2D) (None, 25, 25, 16) 6928 elu5[0][0] \n",
"__________________________________________________________________________________________________\n",
"boxes6 (Conv2D) (None, 12, 12, 16) 6928 elu6[0][0] \n",
"__________________________________________________________________________________________________\n",
"boxes7 (Conv2D) (None, 6, 6, 16) 4624 elu7[0][0] \n",
"__________________________________________________________________________________________________\n",
"classes4_reshape (Reshape) (None, 10000, 3) 0 classes4[0][0] \n",
"__________________________________________________________________________________________________\n",
"classes5_reshape (Reshape) (None, 2500, 3) 0 classes5[0][0] \n",
"__________________________________________________________________________________________________\n",
"classes6_reshape (Reshape) (None, 576, 3) 0 classes6[0][0] \n",
"__________________________________________________________________________________________________\n",
"classes7_reshape (Reshape) (None, 144, 3) 0 classes7[0][0] \n",
"__________________________________________________________________________________________________\n",
"anchors4 (AnchorBoxes) (None, 50, 50, 4, 8) 0 boxes4[0][0] \n",
"__________________________________________________________________________________________________\n",
"anchors5 (AnchorBoxes) (None, 25, 25, 4, 8) 0 boxes5[0][0] \n",
"__________________________________________________________________________________________________\n",
"anchors6 (AnchorBoxes) (None, 12, 12, 4, 8) 0 boxes6[0][0] \n",
"__________________________________________________________________________________________________\n",
"anchors7 (AnchorBoxes) (None, 6, 6, 4, 8) 0 boxes7[0][0] \n",
"__________________________________________________________________________________________________\n",
"classes_concat (Concatenate) (None, 13220, 3) 0 classes4_reshape[0][0] \n",
" classes5_reshape[0][0] \n",
" classes6_reshape[0][0] \n",
" classes7_reshape[0][0] \n",
"__________________________________________________________________________________________________\n",
"boxes4_reshape (Reshape) (None, 10000, 4) 0 boxes4[0][0] \n",
"__________________________________________________________________________________________________\n",
"boxes5_reshape (Reshape) (None, 2500, 4) 0 boxes5[0][0] \n",
"__________________________________________________________________________________________________\n",
"boxes6_reshape (Reshape) (None, 576, 4) 0 boxes6[0][0] \n",
"__________________________________________________________________________________________________\n",
"boxes7_reshape (Reshape) (None, 144, 4) 0 boxes7[0][0] \n",
"__________________________________________________________________________________________________\n",
"anchors4_reshape (Reshape) (None, 10000, 8) 0 anchors4[0][0] \n",
"__________________________________________________________________________________________________\n",
"anchors5_reshape (Reshape) (None, 2500, 8) 0 anchors5[0][0] \n",
"__________________________________________________________________________________________________\n",
"anchors6_reshape (Reshape) (None, 576, 8) 0 anchors6[0][0] \n",
"__________________________________________________________________________________________________\n",
"anchors7_reshape (Reshape) (None, 144, 8) 0 anchors7[0][0] \n",
"__________________________________________________________________________________________________\n",
"classes_softmax (Activation) (None, 13220, 3) 0 classes_concat[0][0] \n",
"__________________________________________________________________________________________________\n",
"boxes_concat (Concatenate) (None, 13220, 4) 0 boxes4_reshape[0][0] \n",
" boxes5_reshape[0][0] \n",
" boxes6_reshape[0][0] \n",
" boxes7_reshape[0][0] \n",
"__________________________________________________________________________________________________\n",
"anchors_concat (Concatenate) (None, 13220, 8) 0 anchors4_reshape[0][0] \n",
" anchors5_reshape[0][0] \n",
" anchors6_reshape[0][0] \n",
" anchors7_reshape[0][0] \n",
"__________________________________________________________________________________________________\n",
"predictions (Concatenate) (None, 13220, 15) 0 classes_softmax[0][0] \n",
" boxes_concat[0][0] \n",
" anchors_concat[0][0] \n",
"==================================================================================================\n",
"Total params: 193,120\n",
"Trainable params: 192,448\n",
"Non-trainable params: 672\n",
"__________________________________________________________________________________________________\n"
]
}
],
"source": [
"\n",
"\n",
"\n",
"model.summary()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.8"
}
},
"nbformat": 4,
"nbformat_minor": 2
}