commit 8c0ff1890b62d71ae85ca609f6e579871f769709 Author: Alvaro Farias Date: Thu Jan 16 10:51:32 2020 -0300 :tada: initial commit diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..111b9fa --- /dev/null +++ b/.gitignore @@ -0,0 +1,11 @@ +#ignore ssd folder +panel/ +panel_jpg/ +result_ssd7_panel_1/ +result_ssd7_panel_2/ +Train&Test_A/ +result_ssd7_panel/ +result_ssd7_panel_cell/ +Thermal/ + + diff --git a/.ipynb_checkpoints/Panel_Detector-checkpoint.ipynb b/.ipynb_checkpoints/Panel_Detector-checkpoint.ipynb new file mode 100644 index 0000000..73f4dc4 --- /dev/null +++ b/.ipynb_checkpoints/Panel_Detector-checkpoint.ipynb @@ -0,0 +1,1649 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Detector de Celulas" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Cargar el modelo ssd7 \n", + "(https://github.com/pierluigiferrari/ssd_keras#how-to-fine-tune-one-of-the-trained-models-on-your-own-dataset)\n", + "\n", + "Training del SSD7 (modelo reducido de SSD). Parámetros en config_7.json y descargar VGG_ILSVRC_16_layers_fc_reduced.h5\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Training on: \t{'panel': 1}\n", + "\n", + "OK create model\n", + "\n", + "Loading pretrained weights VGG.\n", + "\n", + "__________________________________________________________________________________________________\n", + "Layer (type) Output Shape Param # Connected to \n", + "==================================================================================================\n", + "input_1 (InputLayer) (None, 400, 400, 3) 0 \n", + "__________________________________________________________________________________________________\n", + "identity_layer (Lambda) (None, 400, 400, 3) 0 input_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv1 (Conv2D) (None, 400, 400, 32) 2432 identity_layer[0][0] \n", + "__________________________________________________________________________________________________\n", + "bn1 (BatchNormalization) (None, 400, 400, 32) 128 conv1[0][0] \n", + "__________________________________________________________________________________________________\n", + "elu1 (ELU) (None, 400, 400, 32) 0 bn1[0][0] \n", + "__________________________________________________________________________________________________\n", + "pool1 (MaxPooling2D) (None, 200, 200, 32) 0 elu1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2 (Conv2D) (None, 200, 200, 48) 13872 pool1[0][0] \n", + "__________________________________________________________________________________________________\n", + "bn2 (BatchNormalization) (None, 200, 200, 48) 192 conv2[0][0] \n", + "__________________________________________________________________________________________________\n", + "elu2 (ELU) (None, 200, 200, 48) 0 bn2[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "pool2 (MaxPooling2D) (None, 100, 100, 48) 0 elu2[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv3 (Conv2D) (None, 100, 100, 64) 27712 pool2[0][0] \n", + "__________________________________________________________________________________________________\n", + "bn3 (BatchNormalization) (None, 100, 100, 64) 256 conv3[0][0] \n", + "__________________________________________________________________________________________________\n", + "elu3 (ELU) (None, 100, 100, 64) 0 bn3[0][0] \n", + "__________________________________________________________________________________________________\n", + "pool3 (MaxPooling2D) (None, 50, 50, 64) 0 elu3[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv4 (Conv2D) (None, 50, 50, 64) 36928 pool3[0][0] \n", + "__________________________________________________________________________________________________\n", + "bn4 (BatchNormalization) (None, 50, 50, 64) 256 conv4[0][0] \n", + "__________________________________________________________________________________________________\n", + "elu4 (ELU) (None, 50, 50, 64) 0 bn4[0][0] \n", + "__________________________________________________________________________________________________\n", + "pool4 (MaxPooling2D) (None, 25, 25, 64) 0 elu4[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv5 (Conv2D) (None, 25, 25, 48) 27696 pool4[0][0] \n", + "__________________________________________________________________________________________________\n", + "bn5 (BatchNormalization) (None, 25, 25, 48) 192 conv5[0][0] \n", + "__________________________________________________________________________________________________\n", + "elu5 (ELU) (None, 25, 25, 48) 0 bn5[0][0] \n", + "__________________________________________________________________________________________________\n", + "pool5 (MaxPooling2D) (None, 12, 12, 48) 0 elu5[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv6 (Conv2D) (None, 12, 12, 48) 20784 pool5[0][0] \n", + "__________________________________________________________________________________________________\n", + "bn6 (BatchNormalization) (None, 12, 12, 48) 192 conv6[0][0] \n", + "__________________________________________________________________________________________________\n", + "elu6 (ELU) (None, 12, 12, 48) 0 bn6[0][0] \n", + "__________________________________________________________________________________________________\n", + "pool6 (MaxPooling2D) (None, 6, 6, 48) 0 elu6[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv7 (Conv2D) (None, 6, 6, 32) 13856 pool6[0][0] \n", + "__________________________________________________________________________________________________\n", + "bn7 (BatchNormalization) (None, 6, 6, 32) 128 conv7[0][0] \n", + "__________________________________________________________________________________________________\n", + "elu7 (ELU) (None, 6, 6, 32) 0 bn7[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes4 (Conv2D) (None, 50, 50, 8) 4616 elu4[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "classes5 (Conv2D) (None, 25, 25, 8) 3464 elu5[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes6 (Conv2D) (None, 12, 12, 8) 3464 elu6[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes7 (Conv2D) (None, 6, 6, 8) 2312 elu7[0][0] \n", + "__________________________________________________________________________________________________\n", + "boxes4 (Conv2D) (None, 50, 50, 16) 9232 elu4[0][0] \n", + "__________________________________________________________________________________________________\n", + "boxes5 (Conv2D) (None, 25, 25, 16) 6928 elu5[0][0] \n", + "__________________________________________________________________________________________________\n", + "boxes6 (Conv2D) (None, 12, 12, 16) 6928 elu6[0][0] \n", + "__________________________________________________________________________________________________\n", + "boxes7 (Conv2D) (None, 6, 6, 16) 4624 elu7[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes4_reshape (Reshape) (None, 10000, 2) 0 classes4[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes5_reshape (Reshape) (None, 2500, 2) 0 classes5[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes6_reshape (Reshape) (None, 576, 2) 0 classes6[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes7_reshape (Reshape) (None, 144, 2) 0 classes7[0][0] \n", + "__________________________________________________________________________________________________\n", + "anchors4 (AnchorBoxes) (None, 50, 50, 4, 8) 0 boxes4[0][0] \n", + "__________________________________________________________________________________________________\n", + "anchors5 (AnchorBoxes) (None, 25, 25, 4, 8) 0 boxes5[0][0] \n", + "__________________________________________________________________________________________________\n", + "anchors6 (AnchorBoxes) (None, 12, 12, 4, 8) 0 boxes6[0][0] \n", + "__________________________________________________________________________________________________\n", + "anchors7 (AnchorBoxes) (None, 6, 6, 4, 8) 0 boxes7[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes_concat (Concatenate) (None, 13220, 2) 0 classes4_reshape[0][0] \n", + " classes5_reshape[0][0] \n", + " classes6_reshape[0][0] \n", + " classes7_reshape[0][0] \n", + "__________________________________________________________________________________________________\n", + "boxes4_reshape (Reshape) (None, 10000, 4) 0 boxes4[0][0] \n", + "__________________________________________________________________________________________________\n", + "boxes5_reshape (Reshape) (None, 2500, 4) 0 boxes5[0][0] \n", + "__________________________________________________________________________________________________\n", + "boxes6_reshape (Reshape) (None, 576, 4) 0 boxes6[0][0] \n", + "__________________________________________________________________________________________________\n", + "boxes7_reshape (Reshape) (None, 144, 4) 0 boxes7[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "anchors4_reshape (Reshape) (None, 10000, 8) 0 anchors4[0][0] \n", + "__________________________________________________________________________________________________\n", + "anchors5_reshape (Reshape) (None, 2500, 8) 0 anchors5[0][0] \n", + "__________________________________________________________________________________________________\n", + "anchors6_reshape (Reshape) (None, 576, 8) 0 anchors6[0][0] \n", + "__________________________________________________________________________________________________\n", + "anchors7_reshape (Reshape) (None, 144, 8) 0 anchors7[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes_softmax (Activation) (None, 13220, 2) 0 classes_concat[0][0] \n", + "__________________________________________________________________________________________________\n", + "boxes_concat (Concatenate) (None, 13220, 4) 0 boxes4_reshape[0][0] \n", + " boxes5_reshape[0][0] \n", + " boxes6_reshape[0][0] \n", + " boxes7_reshape[0][0] \n", + "__________________________________________________________________________________________________\n", + "anchors_concat (Concatenate) (None, 13220, 8) 0 anchors4_reshape[0][0] \n", + " anchors5_reshape[0][0] \n", + " anchors6_reshape[0][0] \n", + " anchors7_reshape[0][0] \n", + "__________________________________________________________________________________________________\n", + "predictions (Concatenate) (None, 13220, 14) 0 classes_softmax[0][0] \n", + " boxes_concat[0][0] \n", + " anchors_concat[0][0] \n", + "==================================================================================================\n", + "Total params: 186,192\n", + "Trainable params: 185,520\n", + "Non-trainable params: 672\n", + "__________________________________________________________________________________________________\n" + ] + } + ], + "source": [ + "from keras.optimizers import Adam, SGD\n", + "from keras.callbacks import ModelCheckpoint, LearningRateScheduler, TerminateOnNaN, CSVLogger\n", + "from keras import backend as K\n", + "from keras.models import load_model\n", + "from math import ceil\n", + "import numpy as np\n", + "from matplotlib import pyplot as plt\n", + "import os\n", + "import json\n", + "import xml.etree.cElementTree as ET\n", + "\n", + "import sys\n", + "sys.path += [os.path.abspath('../ssd_keras-master')]\n", + "\n", + "from keras_loss_function.keras_ssd_loss import SSDLoss\n", + "from keras_layers.keras_layer_AnchorBoxes import AnchorBoxes\n", + "from keras_layers.keras_layer_DecodeDetections import DecodeDetections\n", + "from keras_layers.keras_layer_DecodeDetectionsFast import DecodeDetectionsFast\n", + "from keras_layers.keras_layer_L2Normalization import L2Normalization\n", + "from ssd_encoder_decoder.ssd_input_encoder import SSDInputEncoder\n", + "from ssd_encoder_decoder.ssd_output_decoder import decode_detections, decode_detections_fast\n", + "from data_generator.object_detection_2d_data_generator import DataGenerator\n", + "from data_generator.object_detection_2d_geometric_ops import Resize\n", + "from data_generator.object_detection_2d_photometric_ops import ConvertTo3Channels\n", + "from data_generator.data_augmentation_chain_original_ssd import SSDDataAugmentation\n", + "from data_generator.object_detection_2d_misc_utils import apply_inverse_transforms\n", + "from eval_utils.average_precision_evaluator import Evaluator\n", + "from 
data_generator.data_augmentation_chain_variable_input_size import DataAugmentationVariableInputSize\n", + "from data_generator.data_augmentation_chain_constant_input_size import DataAugmentationConstantInputSize\n", + "\n", + "\n", + "def makedirs(path):\n", + " try:\n", + " os.makedirs(path)\n", + " except OSError:\n", + " if not os.path.isdir(path):\n", + " raise\n", + "\n", + " \n", + "makedirs(path_anns)\n", + "\n", + "\n", + "\n", + "K.tensorflow_backend._get_available_gpus()\n", + "\n", + "\n", + "def lr_schedule(epoch):\n", + " if epoch < 80:\n", + " return 0.001\n", + " elif epoch < 100:\n", + " return 0.0001\n", + " else:\n", + " return 0.00001\n", + "\n", + "config_path = 'config_7_panel.json'\n", + "\n", + "\n", + "with open(config_path) as config_buffer:\n", + " config = json.loads(config_buffer.read())\n", + "\n", + "###############################\n", + "# Parse the annotations\n", + "###############################\n", + "path_imgs_training = config['train']['train_image_folder']\n", + "path_anns_training = config['train']['train_annot_folder']\n", + "path_imgs_val = config['test']['test_image_folder']\n", + "path_anns_val = config['test']['test_annot_folder']\n", + "labels = config['model']['labels']\n", + "categories = {}\n", + "#categories = {\"Razor\": 1, \"Gun\": 2, \"Knife\": 3, \"Shuriken\": 4} #la categoría 0 es la background\n", + "for i in range(len(labels)): categories[labels[i]] = i+1\n", + "print('\\nTraining on: \\t' + str(categories) + '\\n')\n", + "\n", + "####################################\n", + "# Parameters\n", + "###################################\n", + " #%%\n", + "img_height = config['model']['input'] # Height of the model input images\n", + "img_width = config['model']['input'] # Width of the model input images\n", + "img_channels = 3 # Number of color channels of the model input images\n", + "mean_color = [123, 117, 104] # The per-channel mean of the images in the dataset. Do not change this value if you're using any of the pre-trained weights.\n", + "swap_channels = [2, 1, 0] # The color channel order in the original SSD is BGR, so we'll have the model reverse the color channel order of the input images.\n", + "n_classes = len(labels) # Number of positive classes, e.g. 
20 for Pascal VOC, 80 for MS COCO\n", + "scales_pascal = [0.1, 0.2, 0.37, 0.54, 0.71, 0.88, 1.05] # The anchor box scaling factors used in the original SSD300 for the Pascal VOC datasets\n", + "#scales_coco = [0.07, 0.15, 0.33, 0.51, 0.69, 0.87, 1.05] # The anchor box scaling factors used in the original SSD300 for the MS COCO datasets\n", + "scales = scales_pascal\n", + "aspect_ratios = [[1.0, 2.0, 0.5],\n", + " [1.0, 2.0, 0.5, 3.0, 1.0/3.0],\n", + " [1.0, 2.0, 0.5, 3.0, 1.0/3.0],\n", + " [1.0, 2.0, 0.5, 3.0, 1.0/3.0],\n", + " [1.0, 2.0, 0.5],\n", + " [1.0, 2.0, 0.5]] # The anchor box aspect ratios used in the original SSD300; the order matters\n", + "two_boxes_for_ar1 = True\n", + "steps = [8, 16, 32, 64, 100, 300] # The space between two adjacent anchor box center points for each predictor layer.\n", + "offsets = [0.5, 0.5, 0.5, 0.5, 0.5, 0.5] # The offsets of the first anchor box center points from the top and left borders of the image as a fraction of the step size for each predictor layer.\n", + "clip_boxes = False # Whether or not to clip the anchor boxes to lie entirely within the image boundaries\n", + "variances = [0.1, 0.1, 0.2, 0.2] # The variances by which the encoded target coordinates are divided as in the original implementation\n", + "normalize_coords = True\n", + "\n", + "K.clear_session() # Clear previous models from memory.\n", + "\n", + "\n", + "model_path = config['train']['saved_weights_name']\n", + "# 3: Instantiate an optimizer and the SSD loss function and compile the model.\n", + "# If you want to follow the original Caffe implementation, use the preset SGD\n", + "# optimizer, otherwise I'd recommend the commented-out Adam optimizer.\n", + "\n", + "\n", + "if config['model']['backend'] == 'ssd7':\n", + " #weights_path = 'VGG_ILSVRC_16_layers_fc_reduced.h5'\n", + " scales = [0.08, 0.16, 0.32, 0.64, 0.96] # An explicit list of anchor box scaling factors. 
If this is passed, it will override `min_scale` and `max_scale`.\n", + " aspect_ratios = [0.5 ,1.0, 2.0] # The list of aspect ratios for the anchor boxes\n", + " two_boxes_for_ar1 = True # Whether or not you want to generate two anchor boxes for aspect ratio 1\n", + " steps = None # In case you'd like to set the step sizes for the anchor box grids manually; not recommended\n", + " offsets = None\n", + "\n", + "if os.path.exists(model_path):\n", + " print(\"\\nLoading pretrained weights.\\n\")\n", + " # We need to create an SSDLoss object in order to pass that to the model loader.\n", + " ssd_loss = SSDLoss(neg_pos_ratio=3, alpha=1.0)\n", + "\n", + " K.clear_session() # Clear previous models from memory.\n", + " model = load_model(model_path, custom_objects={'AnchorBoxes': AnchorBoxes,\n", + " 'L2Normalization': L2Normalization,\n", + " 'compute_loss': ssd_loss.compute_loss})\n", + "\n", + "\n", + "else:\n", + " ####################################\n", + " # Build the Keras model.\n", + " ###################################\n", + "\n", + " if config['model']['backend'] == 'ssd300':\n", + " #weights_path = 'VGG_VOC0712Plus_SSD_300x300_ft_iter_160000.h5'\n", + " from models.keras_ssd300 import ssd_300 as ssd\n", + "\n", + " model = ssd_300(image_size=(img_height, img_width, img_channels),\n", + " n_classes=n_classes,\n", + " mode='training',\n", + " l2_regularization=0.0005,\n", + " scales=scales,\n", + " aspect_ratios_per_layer=aspect_ratios,\n", + " two_boxes_for_ar1=two_boxes_for_ar1,\n", + " steps=steps,\n", + " offsets=offsets,\n", + " clip_boxes=clip_boxes,\n", + " variances=variances,\n", + " normalize_coords=normalize_coords,\n", + " subtract_mean=mean_color,\n", + " swap_channels=swap_channels)\n", + "\n", + "\n", + " elif config['model']['backend'] == 'ssd7':\n", + " #weights_path = 'VGG_ILSVRC_16_layers_fc_reduced.h5'\n", + " from models.keras_ssd7 import build_model as ssd\n", + " scales = [0.08, 0.16, 0.32, 0.64, 0.96] # An explicit list of anchor box scaling factors. If this is passed, it will override `min_scale` and `max_scale`.\n", + " aspect_ratios = [0.5 ,1.0, 2.0] # The list of aspect ratios for the anchor boxes\n", + " two_boxes_for_ar1 = True # Whether or not you want to generate two anchor boxes for aspect ratio 1\n", + " steps = None # In case you'd like to set the step sizes for the anchor box grids manually; not recommended\n", + " offsets = None\n", + " model = ssd(image_size=(img_height, img_width, img_channels),\n", + " n_classes=n_classes,\n", + " mode='training',\n", + " l2_regularization=0.0005,\n", + " scales=scales,\n", + " aspect_ratios_global=aspect_ratios,\n", + " aspect_ratios_per_layer=None,\n", + " two_boxes_for_ar1=two_boxes_for_ar1,\n", + " steps=steps,\n", + " offsets=offsets,\n", + " clip_boxes=clip_boxes,\n", + " variances=variances,\n", + " normalize_coords=normalize_coords,\n", + " subtract_mean=None,\n", + " divide_by_stddev=None)\n", + "\n", + " else :\n", + " print('Wrong Backend')\n", + "\n", + "\n", + "\n", + " print('OK create model')\n", + " #sgd = SGD(lr=config['train']['learning_rate'], momentum=0.9, decay=0.0, nesterov=False)\n", + "\n", + " # TODO: Set the path to the weights you want to load. 
only for ssd300 or ssd512\n", + "\n", + " weights_path = '../ssd_keras-master/VGG_ILSVRC_16_layers_fc_reduced.h5'\n", + " print(\"\\nLoading pretrained weights VGG.\\n\")\n", + " model.load_weights(weights_path, by_name=True)\n", + "\n", + " # 3: Instantiate an optimizer and the SSD loss function and compile the model.\n", + " # If you want to follow the original Caffe implementation, use the preset SGD\n", + " # optimizer, otherwise I'd recommend the commented-out Adam optimizer.\n", + "\n", + "\n", + " #adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)\n", + " #sgd = SGD(lr=0.001, momentum=0.9, decay=0.0, nesterov=False)\n", + " optimizer = Adam(lr=config['train']['learning_rate'], beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)\n", + " ssd_loss = SSDLoss(neg_pos_ratio=3, alpha=1.0)\n", + " model.compile(optimizer=optimizer, loss=ssd_loss.compute_loss)\n", + "\n", + " model.summary()\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Instanciar los generadores de datos y entrenamiento del modelo.\n", + "\n", + "*Cambio realizado para leer png y jpg. keras-ssd-master/data_generator/object_detection_2d_data_generator.py función parse_xml\n" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\n", + "Processing image set 'train.txt': 0%| | 0/1 [00:006}\".format(train_dataset_size))\n", + "print(\"Number of images in the validation dataset:\\t{:>6}\".format(val_dataset_size))\n", + "\n", + "\n", + "\n", + "##########################\n", + "# Define model callbacks.\n", + "#########################\n", + "\n", + "# TODO: Set the filepath under which you want to save the model.\n", + "model_checkpoint = ModelCheckpoint(filepath= config['train']['saved_weights_name'],\n", + " monitor='val_loss',\n", + " verbose=1,\n", + " save_best_only=True,\n", + " save_weights_only=False,\n", + " mode='auto',\n", + " period=1)\n", + "#model_checkpoint.best =\n", + "\n", + "csv_logger = CSVLogger(filename='log.csv',\n", + " separator=',',\n", + " append=True)\n", + "\n", + "learning_rate_scheduler = LearningRateScheduler(schedule=lr_schedule,\n", + " verbose=1)\n", + "\n", + "terminate_on_nan = TerminateOnNaN()\n", + "\n", + "callbacks = [model_checkpoint,\n", + " csv_logger,\n", + " learning_rate_scheduler,\n", + " terminate_on_nan]\n", + "\n", + "\n", + "\n", + "batch_images, batch_labels = next(train_generator)\n", + "\n", + "\n", + "initial_epoch = 0\n", + "final_epoch = 100 #config['train']['nb_epochs']\n", + "steps_per_epoch = 50\n", + "\n", + "history = model.fit_generator(generator=train_generator,\n", + " steps_per_epoch=steps_per_epoch,\n", + " epochs=final_epoch,\n", + " callbacks=callbacks,\n", + " validation_data=val_generator,\n", + " validation_steps=ceil(val_dataset_size/batch_size),\n", + " initial_epoch=initial_epoch,\n", + " verbose = 1 if config['train']['debug'] else 2)\n", + "\n", + "history_path = config['train']['saved_weights_name'].split('.')[0] + '_history'\n", + "\n", + "np.save(history_path, history.history)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "dict_keys(['val_loss', 'loss', 'lr'])\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYIAAAEWCAYAAABrDZDcAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsnXd4m9W9xz9Hw5Z3HO/YGQ7ZCdmELPbes6xCW2bpAm5bKNzuXm5LCx0UKGXTUhouZe8VEkIWIXuH7DjLM463LUvn/nHeV8uyLTmyJFvn8zx5tF6975Ejne/5zSOklGg0Go0mcbHEegAajUajiS1aCDQajSbB0UKg0Wg0CY4WAo1Go0lwtBBoNBpNgqOFQKPRaBIcLQQaTRcIIZ4XQtwf4rF7hBBnHut5NJpoo4VAo9FoEhwtBBqNRpPgaCHQ9HkMl8zdQoj1QohGIcQzQogCIcT7Qoh6IcQnQohsn+MvFkJsEkLUCiEWCiHG+rw2RQix2njf/wGOgGtdKIRYa7x3qRBiYg/HfKsQYocQokYI8ZYQYpDxvBBC/FkIUSGEOGp8pgnGa+cLITYbYzsghPhxj/5gGk0AWgg0/YUrgLOAUcBFwPvAfwO5qO/5HQBCiFHAPOAuIA94D3hbCJEkhEgC3gBeAAYC/zHOi/HeqcCzwLeBHOAJ4C0hRHI4AxVCnA78DrgKKAL2Ai8ZL58NnGx8jgHA1UC18dozwLellBnABODTcK6r0XSGFgJNf+ERKWW5lPIA8DnwhZRyjZSyFXgdmGIcdzXwrpTyYymlE3gISAFmAzMBO/AXKaVTSvkK8KXPNW4FnpBSfiGldEkp/wG0Gu8Lh68Dz0opVxvjuw+YJYQYBjiBDGAMIKSUW6SUh4z3OYFxQohMKeURKeXqMK+r0QRFC4Gmv1Duc785yON04/4g1AocACmlGygDio3XDkj/Tox7fe4PBX5kuIVqhRC1wGDjfeEQOIYG1Kq/WEr5KfAo8BhQLoR4UgiRaRx6BXA+sFcI8ZkQYlaY19VogqKFQJNoHERN6IDyyaMm8wPAIaDYeM5kiM/9MuB/pZQDfP6lSinnHeMY0lCupgMAUsq/SimnAeNRLqK7jee/lFJeAuSjXFgvh3ldjSYoWgg0icbLwAVCiDOEEHbgRyj3zlJgGdAO3CGEsAkhLgdm+Lz3KeB2IcSJRlA3TQhxgRAiI8wx/Bu4UQgx2Ygv/BblytojhDjBOL8daARaAJcRw/i6ECLLcGnVAa5j+DtoNB60EGgSCinlNuB64BGgChVYvkhK2SalbAMuB74FHEHFE17zee9KVJzgUeP1Hcax4Y5hPvBz4FWUFXIccI3xciZKcI6g3EfVqDgGwA3AHiFEHXC78Tk0mmNG6I1pNBqNJrHRFoFGo9EkOFoINBqNJsHRQqDRaDQJjhYCjUajSXBssR5AKOTm5sphw4bFehgajUbTp1i1alWVlDKvu+P6hBAMGzaMlStXxnoYGo1G06cQQuzt/ijtGtJoNJqERwuBRqPRJDhaCDQajSbB6RMxgmA4nU72799PS0tLrIfSqzgcDkpKSrDb7bEeikaj6af0WSHYv38/GRkZDBs2DP9mkf0HKSXV1dXs37+f0tLSWA9Ho9H0U/qsa6ilpYWcnJx+KwIAQghycnL6vdWj0WhiS58VAqBfi4BJInxGjUYTW/q0EHRLy1GoPxzrUWg0Gk1c07+FoLVBCUEvtNqura3lb3/7W9jvO//886mtrY34eDQajaan9G8hsKcAEtoj72PvTAhcrq43jXrvvfcYMGBAxMej0Wg0PaXPZg2FhD1F3TqbvfcjxL333svOnTuZPHkydrud9PR0ioqKWLt2LZs3b+bSSy+lrKyMlpYW7rzzTm677TbA2y6joaGB8847j7lz57J06VKKi4t58803SUmJ7Dg1Go2mO/qFEPz67U1sPlgX/MW2BrAeBWtyWOccNyiTX140vtPXH3jgATZu3MjatWtZuHAhF1xwARs3bvSkeT777LMMHDiQ5uZmTjjhBK644gpycnL8zrF9+3bmzZvHU089xVVXXcWrr77K9dfr3Qc1Gk106RdC0CXCAm43WHv3MjNmzPDL9f/rX//K66+/DkBZWRnbt2/vIASlpaVMnjwZgGnTprFnz57eHaRGo9EEoV8IQVcrd2r3QXMtFB4PvZiKmZaW5rm/cOFCPvnkE5YtW0Zqaiqnnnpq0FqA5GSvlWK1Wmlubu618Wk0Gk1n9O9gMajYgHSByxnR02ZkZFBfXx/0taNHj5KdnU1qaipbt25l+fLlEb22RqPRRJJ+YRF0ic0IvrY3gy0pYqfNyclhzpw5TJgwgZSUFAoKCjyvnXvuufz9739n4sSJjB49mpkzZ0bsuhqNRhNphOyFHHsAIcSzwIVAhZRyQsBrPwYeBPKklFXdnWv69OkycGOaLVu2MHbs2O4H4nbB4fWQUQgZRWF8gvgh5M+q0Wg0PgghVkkpp3d3XG+6hp4Hzg18UggxGDgL2NeL1/ZisaqMIaf2v2s0Gk0wek0IpJSLgJogL/0ZuAfoHVMkGPYULQQajUbTCVENFgshLgYOSCnXhXDsbUKIlUKIlZWVlcd2YXsKuNrA3X5s59FoNJp+SNSEQAiRCvwU+EUox0spn5RSTpdSTs/Lyzu2i9tT1a22CjQajaYD0bQIjgNKgXVCiD1ACbBaCFHY61f2bTWh0Wg0Gj+ilj4qpdwA5JuPDTGYHkrW0DFjtYPFpoVAo9FogtBrFoEQYh6wDBgthNgvhLi5t64VEhEOGPe0DTXAX/7yF5qamiI2Fo1GozkWejNr6FopZZGU0i6lLJFSPhPw+rCoWAMmthTVjjpCdRNaCDQaTX+h/1cWm1isgFRCEIGeQ75tqM866yzy8/N5+eWXaW1t5bLLLuPXv/41jY2NXHXVVezfvx+Xy8XPf/5zysvLOXjwIKeddhq5ubksWLDg2D+bRqPRHAP9QwjevxcOb+j6GFcbuFohKR0IQQgKj4fzHuj0Zd821B999BGvvPIKK1asQErJxRdfzKJFi6isrGTQoEG8++67gOpBlJWVxZ/+9CcWLFhAbm5uGB9So9Foeof+33TOxLQCeqGlxkcffcRHH33ElClTmDp1Klu3bmX79u0cf/zxfPLJJ/zkJz/h888/JysrK+LX1mg0mmOlf1gEXazcPTQfgSN7IG9MxHcrk1Jy33338e1vf7vDa6tWreK9997jvvvu4+yzz+YXvwipjEKj0WiiRgJZBMZHle6InM63DfU555zDs88+S0NDAwAHDhygoqKCgwcPkpqayvXXX8+Pf/xjVq9e3eG9Go1GE2v6h0UQChEWAt821Oeddx7XXXcds2bNAiA9PZ1//etf7Nixg7vvvhuLxYLdbufxxx8H4LbbbuO8886jqKhIB4s1Gk3M6bU21JHkmNpQm7Q1QtVXMHA4OPqWr163odZoND0hHtpQxxcRtgg0Go2mv5B4QuDWQqDRaDS+9GkhCMut1Uctgr7gutNoNH2bPisEDoeD6urq0CdKjxC4em
9QEUZKSXV1NQ6HI9ZD0Wg0/Zg+mzVUUlLC/v37CWvTmtoKcLSC40jvDSzCOBwOSkpKYj0MjUbTj+mzQmC32yktLQ3vTf97Bkz7Fpz7214Zk0aj0fRF+qxrqEckpYGzMdaj0Gg0mrgiwYQgFdp0+2eNRqPxJbGEwJ4GTi0EGo1G40tiCUFSqqow1mg0Go2HxBICe6q2CDQajSaAxBKCpDQdI9BoNJoAEksI7Kk6a0ij0WgC6DUhEEI8K4SoEEJs9HnuQSHEViHEeiHE60KIAb11/aDorCGNRqPpQG9aBM8D5wY89zEwQUo5EfgKuK8Xr98RnTWk0Wg0Heg1IZBSLgJqAp77SErZbjxcDkS3d4KZNaQbuWk0Go2HWMYIbgLe7+xFIcRtQoiVQoiVYfUT6gp7qmo652qLzPk0Go2mHxATIRBC/BRoB17s7Bgp5ZNSyulSyul5eXmRuXBSmrrVtQQajUbjIepN54QQ3wQuBM6Q0W62b09Vt84mYGBUL63RaDTxSlSFQAhxLvAT4BQpZfSjth6LQAeMNRqNxqQ300fnAcuA0UKI/UKIm4FHgQzgYyHEWiHE33vr+kHxWATaNaTRaDQmvWYRSCmvDfL0M711vZBIMoRAWwQajUbjIcEqi3WwWKPRaAJJLCFI0q4hjUajCSSxhMCuXUMajUYTSGIJgZk1pNtMaDQajYfEEgKPRaBdQxqNRmOSmEKgLQKNRqPxkFhCYLGALUVbBJr+z/s/gd2LYj0KTR8h6i0mYk6SbkWtSQBWPAUWG5SeHOuRaPoAiWURgN6cRtP/cbtVl932lliPRNNHSDwhsKfpOgJN/8btVLfO5tiOQ9NnSDwh0BaBpr9j7rehhUATIoknBPZUHSPQ9G9chkUQr64hKeHtu2DvsliPRGOQeEKQlKazhjT9G1ecu4ZcTlj1HOycH+uRaAwSTwi0RaDp78S7a8i0VOLVYklAEk8IdIxA098xg8Xt8SoErcat3js8Xkg8IdBZQ5r+jsc1FKcrbm0RxB2JJwTaItD0d1x9xCJwaYsgXkg8IbCnKdPZ/LFoNP2NuI8RGOPSFkHckHhCkKQ7kGr6OXHvGtIxgngj8YRAdyDV9HfiPlisYwTxRq8JgRDiWSFEhRBio89zA4UQHwshthu32b11/U4xN6fRcQJNf8V0Dbnb49MFagqAjhHEDb1pETwPnBvw3L3AfCnlSGC+8Ti62PW+xZp+jqvdez8e4wQe15C2COKFXhMCKeUioCbg6UuAfxj3/wFc2lvX75SuYgQVW+EvE6HuUHTHpNFEEt+VdjxOth7XUGtsx6HxEO0YQYGU8hCAcZvf2YFCiNuEECuFECsrKysjNwJ7F66hXQugdi9Ub4/c9TSaaOMrBHFtEWghiBfiNlgspXxSSjldSjk9Ly8vcidO6sI1dNgIZ7TWR+56Gk20cfu4huLZInBpIYgXoi0E5UKIIgDjtiLK1/fZwD6IRVBuCkFD9Maj0UQabRFowiTaQvAW8E3j/jeBN6N8fW/WUKBF4GqHyq3qfmtddMek0UQS30yhuBQCHSOIN3ozfXQesAwYLYTYL4S4GXgAOEsIsR04y3gcXTqzCGp2er+gbdoi0PRhfIUgHmsJtEUQd/Ta5vVSyms7eemM3rpmIJX1rRxpamNUQYb3SY9FECAE5Ru993WMQNOX8XMN6RiBpnviNlgcCf7yyVdc++Ry/yctVrAmd0wfPbwRLDaVVaRjBJq+jLuPWATudnC7YjsWDdDPhSAzxU5dixMppf8LSWlBLIJNkDsKUrK1RaDp2/SVGAFo91Cc0K+FICvFjtMlaXYGrDqS0jrGCMo3QsEESM6ANi0Emj5M3AuBz+Qfj+mtCUi/FwKAo80B/Vbsqf5ZQ001UHcACsZDcnrfswjcLvjsD9BcG+uRaOKBvlJZDLrfUJzQr4Ug06GEoK653f+FwM1pyjep20LDIuhrMYKKzbDgf/Vm4BqFywk2h7ofj8Fi3zHFo1AlIP1aCDq3CAJiBKYQFEyApD5oEZjmv/a3akAFi+0pKvkhHtut+8UItEUQDySmECSl+mcNlW+A1FxIL4DkzL5XR+DUOz5pfHC1gcWuXKDx+J3QMYK4o9fqCOKBzBT18eqCxggCLIKC8SBE34wR6EpNjS+udrAmqftxGSzWMYJ4I0EtAp+sIVc7VGyBwuPV4+QMJQSBKafxjHYNaXxxtYHVBnZHfK6421shySjyjMfxJSD9WggyHCFkDdXsUl/GgvHqcXIGIPvWnsbaItD44mpTFoEtJX5jBI5M477+zsYD/VoIrBZBRrKNupZgMYImlW658HfqOdMiSEpXt30pTmBaBLpkXwOqYteapALG8Zg11N4KjizvfU3M6dcxAlDVxUGzhlyt8LdZ0FAOJ9+jMoZABYtBuYcyCqM72J6iLQKNL642o11KSpy2mGiBzEHqvl68xAX9XgiyUuwdg8WmWerIgmtehOKp3teSDYugLwWMddaQxheX03ANOeLTxaktgrij3wtBZoqtY0HZpGsgNQfGXQK2ZP/Xko0gVl8SAm0RaHxxOcFqVxZBY1WsR9MRHSOIO/p1jACURdDBNZSSDROv6igC0LdjBPpHpQEjWGxXFkG8uYakVO4gbRHEFYkpBF3Rly0C7W/VgKostiYZ2XFx5i40J34zFqe/s3FBvxeCTIe9Y9ZQV/RFIdAWgcYXl9OoLI5Di8BctHgsgjgTqgSl3wtBVoqdpjYXTpc7tDf0RSHwxAj0j0qDv2sobi0CwwWrew3FBSEJgRDiTiFEplA8I4RYLYQ4u7cHFwmyUjspKusMmwOEtY/GCPSPSoN/sNjZFF9V8uZixZZixDDiTKgSlFAtgpuklHXA2UAecCOx2Hi+B3hbUYcoBEJ420z0FbRFoPHFTB+1pwAyvvr5mBaBLVltGRtPY0tgQhUCYdyeDzwnpVzn81zYCCH+SwixSQixUQgxTwjh6Om5uqPTfkNd0df2JDDNf/2j0oARLLarVTfEV+M5M2Zhcygx0IuXuCBUIVglhPgIJQQfCiEygBCd7v4IIYqBO4DpUsoJgBW4pifnCoXMHgtBXS+NqBdo1wVlGh88baiN9VU8fS9Mi8BuCoFevMQDoRaU3QxMBnZJKZuEEANR7qFjuW6KEMIJpAIHj+FcXZJltqJuae/mSB+S0qMfI2iqgSN7/KucQ8WpC8o0Pngqi02LII4az3liBNoiiCdCtQhmAduklLVCiOuBnwFHe3JBKeUB4CFgH3AIOCql/Kgn5wqFnlsEUY4RLH0E/nlpz97brtNHNT64nN421BBfmUOeGIFDxwjiiFCF4HGgSQgxCbgH2Av8sycXFEJkA5cApcAgIM0Ql8DjbhNCrBRCrKysrOzJpYAeBIvB2JwmyhZBYwW0HlUb0YeLtgg0vphtqO2p6nE81RJ4LIJkbRHEEaEKQbuUUqIm8IellA8DGT285pnAbillpZTSCbwGzA48SEr5pJRyupRyel5eXg8vBQ67lWSbJUwhiIFF0GLEJHrSJKxdt6HWGLhdgPQ2nYP4tQhsyXrxEieEKgT1Qoj7g
BuAd4UQVsDew2vuA2YKIVKFEAI4A9jSw3OFRNhtJpIyoh8jaD0GIXD6pI/GU864JvqYrhazDTXEWdZQoEWghSAeCFUIrgZaUfUEh4Fi4MGeXFBK+QXwCrAa2GCM4cmenCtUetRvKNrbVfbUIpBSWQTCqh5rn2tiY/7/+1oEcekaMmMEWgjigZCEwJj8XwSyhBAXAi1Syh7FCIzz/VJKOUZKOUFKeYOUsle/DZkp4fYbSifq21WarqhwLRFzRaW7OWpA7cENRmWxESOIS9eQtgjiiVBbTFwFrAC+BlwFfCGEuLI3BxZJ+kQHUtM1FG6qn7naSxlgPNY/rITGYxH41hHEqUWghSBuCLWO4KfACVLKCgAhRB7wCcrFE/dkpdjZXhHGpJ5kCEE04wQ9dQ05A7o5alM7sXEbCx6/OoJ4EgLj+2lN0kIQR4QaI7CYImBQHcZ7Y06mI8guZV3hsQiiVF3scnpXbWG7hoz3ObRFoEF9l8C/sjiuhKDFaOwodIwgjgjVIvhACPEhMM94fDXwXu8MKfJkGTECt1tisYTQIsmzb3GULIIWH8E5VotA52UnNr6uIdMiiKfvRHurd2dAm0MvXOKEkIRASnm3EOIKYA6q2dyTUsrXe3VkESQzxY6UUN/a7mlC1yXRjhG0+hRpt+kYgeYYcPm4hiwWteqOR4sAwJakv69xQsib10spXwVe7cWx9Bpmm4m6ZmdoQhDtfYv9LIIwr+mxCLQQaPARAuN7bo+znv+BFoHbCW63Ei1NzOhSCIQQ9UCwZHoBSCllZq+MKsL4tqIeHMobzP1Uo2YRHINryBMj0MFiDT7BYkMIbCnx13TOtAisSerW1QqWlNiNSdO1EEgpe9pGIq7ISgmz35AnRhAlIYhEjEC7hjTgU1nsYxHEUx2B09c15NMm266FIJYkhD3maTwXalGZzaFK9GNiEYSbNaSDxRoffCuLQRWVxVsdgW+MAPSeBHFAQghB2PsWCxHdPQlMiyAlO3wz3hmYPqp/VAmNb2UxxN8G9oExAtCLlzggMYSgR3sSZEbfIsgY1IMYQaBrSP+oEhrf9FEwNrCPU4vAagiC7o8VcxJCCNKSrFgtIsyisvQoxgiOqqBeyoAexAgCLAIdLE5sfCuLwcjVjych8LUIjFu9eIk5CSEEQggyHbb43aWstQ4cmZCUdgwxAh0s1uBTWWzkgdhT4sw15BsjMIVAWwSxJiGEAHqyJ0GUYwTJmSqw1xOLQFiViIBeXSU6HYLFKXFoEQQKgf7OxpqEEoLwWlHHwiJI70FlsZF6p1dXGvCvLAYjWBxPQtDi/a56YgTaio01CSMEmWG3oo7ivsWmRdAT15Cz2aeJV1L8rK5qdsOjM6D+cKxHklh0qCyOt2BxMItAC0Gs0ULQGdHOGvLECHqQNWQW49gc8ZOBcXgDVG2Dil7dhVQTSIfK4nhrMdESJFishSDWJIwQZKXYw8saMmMEbnfvDcqktd5rEbid4bl3TIsA4ssiaDEa6UVzcx9NkMriVPWdiMb3uDvcLvX97lBZrIUg1iSUEBxtbsPlDnEf4uQMQIIzCttVttSpymAz4BvONdtbvH3ne7Ot7+7PYdFDoR+vhSA2BGs6B/GxQPDdphL8ew1pYkrCCMG4okycLsnasiOhvSFaexK42tXEb1oEEJ57yNns7Tvfmzs+bXgZFv859OPNIrlobe7T32iqgU/vV6vocHA5VRaZxaoex9OeBL7bVPreaosg5iSMEJw8Kg+rRfDp1oruD4bodSA1J0pHD4XAzyJI7r0ffGuDGleoLgZtERwb2z+GRQ9Cxebw3udq81oDEF+7lAVaBJ5eQ1oIYk1MhEAIMUAI8YoQYqsQYosQYlZvXzMrxc60odl8urUytDd49iSIkhAkZ/ZsH4RAi6C3gsVtDYAMPSfd7J/UcrTr4zTBMb8DLWFaVC6n1+UCKkYAcW4RxMHYEpxYWQQPAx9IKccAk4CopJacPiafLYfqOHQ0hMnM3KUs3B9iuLT4WATmj7anFoG1ly0CCL3OQVsEx4bZfDBc15rb6a0qBu9kGw97EpjfTbtPcgPET6ZbAhN1IRBCZAInA88ASCnbpJS10bj26WPyAVgQilWQO1JNrOte6t1B+VkEpmsojB9te0t0YgSmZRSqteIRAh0j6BHmdyBsi6AtwCIwvhvx0GYi0CIwN7DXFkHMiYVFMByoBJ4TQqwRQjwthEgLPEgIcZsQYqUQYmVlZYjunG4YmZ9OSXZKaHGC9HyY9V1Y/xIcWB2R6wfF1yLokWsoSllDHosgRGulVVsEx4SZORaukLra/YXA436JwxiBeV9Xw8ecWAiBDZgKPC6lnAI0AvcGHiSlfFJKOV1KOT0vLy8iFxZCcPqYfJbsqKLFGUI2xtwfQmoufPQzkCGmnYZLUIsgTNeQxyLoxc3A28IUAtMi6G3XWn/F/Du3hGksu9rA6uMaimeLAHo3wUETMrEQgv3AfinlF8bjV1DCEBVOG5NPs9PFF7truj/YkQmn3Qd7l8DWd3tnQC3HKATOZn+LoLdysj0WQaiuITN9VFsEPSLSrqF4tQisvZjgoAmZqAuBlPIwUCaEGG08dQYQZo5cz5k1PAeH3cKCUNNIp34LckfDx7/oHRPWdKH4po+GWlDmqdQ0fuzWXooRuNq9E0koIiWlriM4VnrqGnK3e6uKwSdYHA9CoC2CeCVWWUM/AF4UQqwHJgO/jdaFHXYrc47L5dOtFchQ3D1WG5z1a6jZCdvei/yAWurUBG5LVvnf1qTOJ9vmWpj/P97qUfPH3dt1BL5WQChC0NYA0g0IbRH0lGOyCHzrCEzXUDwIgWkRBAqBriOINTERAinlWsP/P1FKeamUMsRy38hwxtgC9tU0selgiD+yITPV7dH9kR+M2XDOpKvGc199CJ8/BAfXqMeeFZZP07nesFr8hCAE15AZH8goVEIQD31u+ho9TR8NrCOIp1x9z/c1MFishSDWJExlsS8XHF9EktXCK6tCnNgdA1RudlNV5AdjtqA2SUrvXAjqD6nbBsOt1cEi6KWmc61hWgSmEGSVADJ6G/z0JzwFZWEW5LmcARaBUZsSrxaBTh+NCxJSCLJS7Zw1voA31x6grT2E1aoQkJoDjb0gBIEWgT2184mzoVzdNhpCEMwicDsjvwIP1zVkujOyStStdg+FT6RcQ7ZkQMSJEHRiEehgccxJSCEAuHJaCUeanCzYFmLQODUXmqojP5AOFkFa5wVlphB0ZhH0VjdH34k8bIsAHTDuCcdUWewjBELEzwb2OlgctySsEJw0Ipe8jOTQ3UNpudGxCLqKEdQHCEEwi8D3+UgRbozAnLyyBhuPE9QiaKqBhh4WQ3rqCHoSI7D7P2d3xEkdQSsIS0ALDF1QFg8krBDYrBYun1LMgq0VVDWEsIJOy+2dGEFrPSRneR+bG+IEo8HY9tG0DIJlDUHkf1hmjEBYQ+tZE2gRJGpR2Tt3wSs39uy95t/Z2ajSd0MlMFgMaq+L5qjmYwSnvcW7raqJjhHEBQkrBABXTCuh3S15c+3B7g9O7SWLoKUHFkGj
scrsYBEk+z8fKUxhSs8P0TVkVMMmumuo7iDU7g3/fS6n8pun5qjH4fz9AmMEAJklUHcg/HFEmvZW//gAxNf2qglMQgvBqIIMJpZkheYeSstVP8hIprq5XaqZm1+MIDX4ZNvW6G381qlFYNxG+odlunbS80NMH61TY0nNNd6foELQWg+NPYgrmf//GUXqNpzMoWCuoayS3kl9DhfTIvCltzLdNGGR0EIAcNX0wWw5VMeNz61gd1UXq11zdRbJgLE5wToC0keDuV/qDbdQykCv3zkw+Ga6BHrDIhBW9TcINVjsyPK28k7UGEFLnXLthJuxY/7/m0IQjpAGBotBCUHdwfBcTL1Be2sQIeil2hdNWCS8EFw7Ywg/u2AsX+45wjl/XsSDH26l3RUk/TLNWN1G0j3k23DOJCnNqMwNqHo2rYCiiWpyaW3wsQgCg8W9kDWUnN51jYM2B01/AAAgAElEQVTf8eYezOmASNwYgfn/2xRCXytfzKyxTNMiCNc1FBAjyCoB6fLGmGKFs7mjEFi1RRAPJLwQWC2CW04azqc/PoULJxbx2IKdPPjhto4Hmm6OSAaMfVtQmySlqfYMgT8O0yIonKhuGyuC7Phkxgg6EYLXb4fNb4U/ztYGSMoIXQhajipxs1iUVZCIFoHb5XWjhfudcQa4hsKKEbQHcQ0Z2Vuxdg91FiPojdoXTVjYuj8kMcjPcPCnqyeTlmzjiUW7mDo0m3PGF3oPSDNaYffE59sZQS0Cc0+CRu9KH7wWgSkEDRVBLIIugsWudrXJji0Zxl0c3jjbTIsgLfQWE44B6n6iCoHvZw7XnWhaBBnG9y9siyCIawjiQAg6iRGAqn2xpHR8jyYqJLxFEMjPLhzLpJIsfvzyOvb4xgzSomQRdLZdZf1hZUbnjVKPG7qwCIIFi5uqANmzNMLWBiVQXWU0+eKbCZWc6e2wmkj4ruLDXTz0NFgspVpdd3ANFavbmAtBJxaB+ZomZmghCCDZZuWxr0/FahXc/q9V3g1sHANUwLRXYgS+dQSd7EnQUA7pBZBurBIbK7w+VzMv29qFRWAWofVECNoa1Mo+KV2JTHfBPTNYDNoigPAtAo9ryPi/DtU15DaCwYHB4uQM9f2NuRAEsQg8CQ5aCGKJFoIglGSn8tCVk9h6uJ63zBoDiwVSB3pz+CNBi89eBCa+riFf6g8rIUjNAYTXIggs14fgPypz3D21CEzXEHS/X0KrT9sMR2ZiBot9P3O4VqTpGnJkKQsxVIvAtAQDXUOg4gQxF4IuLILe2lBJExJaCDrhjLH5lGSn8P7GQ94nI91vqLOsIejoi28oVytEq025qcwYgW8coSsz2yMEYW59aI4lKSO0HdTaW5VAJbxF4CsEPbQI7GmGay1EITX3qQh0DUF81BIEjRF0k+CgiQpaCDpBCMG54wtZvKOKuhbjBxbpfkMtdepHa/f5cSR1ESNILzDGkR++RXAsrqHWen+LoCsh8MQ9TCEIYyLrDVzO2EyApvhZbOF/Z0yLICk1PIvKIwTBLIISOFoW3jgiTVCLQAtBPKCFoAvOO74Qp0t6t7VMzYlcsPjIXti/0lt0ZWK6hnyLytrboLnG6zNOz/fGCPwsgi5iBGbr6raG8Ap4pLGfQFJ6524rXzzurjixCFb/Ex6dEf02zObfYcDQHtQRmBZBqhLSiLiGSlTrj1j+XwSNEWghiAe0EHTBlMHZFGQm8/4GI4c/EhZB+Wb499Xw8CTYtxQmXOn/ejDXkJk6aloE6fnqucAflvmjCuZv9e2C2RKGe6i9VQUhk9M7t1Z8CRQCR5YSNXO1Gm1q9ypXS7iT8bFiWkEDS3tWR2BzgMWqLIKQg8XG3zgwWAw+KaQx7DnUlUWgYwQxRQtBF1gsgnPGF7Lwqwqa2tpVLUFL7bFNau/8F+xbBif/GO7aAOf/wf/1YO4XUwh8LYKGStVa2NcisNoB0UmMwGffhXDcQ6YghRojMFNFzbhHrNtMmAIQ7e6bLXUqyyyrpGd1BObf2pHVA9dQsBhBHBSVdRkj0NXFsUQLQTecO76QFqebRV9V+vQb6n51WdPYxtHmAMFobYADK2HajXD6z7yrNF+C1RGYVcW+MYL2ZhUA9v1heTYh6cQiMI8NZ1I0J/BkX9dQF0VlHVxDmf7n8UVKWPIwHNkT+njCxfys4VhBkaC1Xq3mU3PV9yWcyllnkwoUQw+DxV1ZBDGKE7jaVZuLToVA9xuKJTETAiGEVQixRgjxTqzGEAozSgeSnWrn/Y2HOxaVSQmv3grbP/F7j9stufxvS5j7+095+vNd3u0wy75QbpbSkzq/oMWq2kr7WQSGEHgsAkMQavf5WwRgdHPsxCLIGanu98giCDdYHGgRBJnMGsrh41/AF0+GPp5wMT9rtC0CM4U2NUdNgOEIUVuj1w0XVrC4ixhBRqGyUKJpEbhdsPjP8PkfYdVz6rlA11BXtS+aqBFLi+BOYEsMrx8SNquFs8cVMn9LBbub1KT7lzeX8uWeGuV/3vCy90tusGJPDXuqm8jPSOb+d7dw9p8/Y+nOKtjzucoiGTyz64sGVvDWl6udncw2F+nGbXuQJl42R8cfldutYhtmVXJYFoEhBCFnDQXGCLqwCEx/ddny0McTLh7XUJQtAnMLUs/iIQz3kLPJaxkmZ6n/51BWzF25hixWyCyOrhDsWwaf/Arm/wbe+7F6zvwOm/RW63RNWMRECIQQJcAFwNOxuH64nDuhkIbWdm57TW0ysm//Pm7750qqthkT2N4lfqb/66sP8FDy03wwbRXP3XgCQghu+cdKWrYvhOJpalLtikAhaDisfkAWq3psWgTgn3oKaoUV+KNqrlGr0tzRxuMexgjsIQqBsHjdSKZFEGxVa26Wcmhd5/s0Hyu9ZRHsXAC1XbhZPK6hgepxOELQ1ugTIzCFNASrwN2FawiiX0uwz/h9/HCLiofdvgQmXuV/jK2XWqdrwiJWFsFfgHuATh2nQojbhBArhRArKysjWM3bA04amctdZ47kexfMAOCnp+ThcksWLPhQHdB8BCqVcdPidLF0wzYuFwuwL/sLp5WmMe/WmeTY2rCXr6Nt8OzuLxi4XWV9uf/kn5bvvW8LdA0F2frPLCbLOU5N0j2NEdiS1GqzqxiB6RIx216Y7TOCWQR1RtW2ux0Org59TKEipRJBiGyMwO2Gedeq+EZntBodWM2uteFkm7U1ei0C07IKJYXUXAAEyxqC6NcS7FsOeWMhcxAMGAKFE7yLGRPdayguiLoQCCEuBCqklKu6Ok5K+aSUcrqUcnpeXl5Xh/Y6NquFu84cxaWzJwKCHFHPI9dNpaR5K0dsxg99z2IAPtlSzjTnaixINSlufJXCLAd/P6UNK26e3FeMDNxrIJDAXcoaDnvjA6BWmcL4QQVaBME2+jCLydILVM+ZnsYIoPvGc759hsAnRhBkIqs74N3IfF8vuIecTd7JMZIWQcNh5a6pP9T5MS116rP3ZEMjp0/WUHIYFkFXriHwblDjdoU+lp7idkHZChjSjRtU9xqKC2JhEcwBLhZC7AFeAk4XQvwrBuM
IH4tVTcJNVZwyIodp9r280zKZo8lFyv8PvLHmABc41iHT8iF/HKx8FoDxretwCRuP7sjhheXd7GOblOZfUBZoEVisXt9zB4sgyEYfpkWQng8p2T2PEUD3exIE7sHcVYyg7qBaKeaOUoH0SOOb3RXJGIGZ5WSm9QbD4xrqgRC0NfkHiyG0gLFHCDrpLp9VotxHDRXBX48kFVuU+HcnBLrXUFwQdSGQUt4npSyRUg4DrgE+lVJeH+1x9BhzE/vq7SS5GnEMnc7HTSNp3vE51fXNfL7tMHPFesTIs1Wa6ME16t+ez7EMPoHZYwZz/ztb2HLI/4e99XAdL68sU9aC72TrdqmMH1+LALzuoaAWQcCPyvzhp+UFF4L2ts7TG31jBND9ngS+exGY47HYOo8RZBbD4BPV6jHSm5P4fs5IWgRHDCHvTAik9LrIklKVmycsi6DRP30UQrQIzKyhziyCKNYS7FumbrVF0CfQdQThkmY0nju4BoDLL7qYtuJZpDhr+e3zbzBJbiPFVQ+jzoZJV6tJYMnDcGgdYthJPHjlRDJT7Nz50hpPi+sdFfVc8+Ry7nllPc8t2eM/2TZWqR3LfC0CUKt76GgRWJM6rq4aK9RknJINKUFcQ/93PTw6Hco3dfy8rfVqMjdXmYHWSiDm7mQmQnTeZqLugPIfD5mpfPhVX3V+3p5gxgfsaZGNEdQaQlBf3nFLUVDtLNztXrdYak4PgsWBFkEIMQKzDXVXriGITpxg33K1n8KAoV0fZ7EY21VqIYglMRUCKeVCKeWFsRxD2KTmqMn5wGqwp2HNH8NVV16rXjq8nK9lblLBuuGnKV/5hMth0+tqMi89iZz0ZB762kS+Km/ggfe3cuhoM994ZgU2i4WTR+Vx/7ubOdBk8VoEgTUEJuldWQQBrqGGSmUNCGFYBAGT4sHVULMTnj4T1v/H/zWzz5BJdzECc79iX4IVRbndUHdICYGZThvpNFJT8AaWRtgi2KNuXa3BJ2hT9MxJ3PzOhEJ7m5rQAy2CkFxDZrC4M9dQFDeo2bdcCbyZNNAVwb6zmqiiLYJwSctVBWUHV0PRJLBYseUMQ2aWcF3+Ps53rIehs72TwPSb1K01GUpU1tGpo/O5cc4wnl+6hysfX0ZdSzvP33gCj399KqMLM/l4ez3uVmOy9VQVdyIEthTqW5x878XV7K1uNLKGAoLFjRXe/O1A15DTqFA+4VYomgyv3QKLHvK+bu5FYGIPxTUUTAgCLIKmKuWvzixW2UypubAvwnECM0YwsDTCMQKfGE8wf3vghkPhWARmC+oeBYu7cQ05stT5elsIasugbj8MmRXa8cGsVE1U0UIQLmbLgMMboHiqek4IxLC5jG34grSjO2DUOd7jB02F4ulQerLf6v0n545hdEEGlfWtPHnDNCYUZ5GWbOPpb06nzZKKpb2JpodnwEvXqTeYqzkTnxjBJ1vKeXfDIV76six4+mhDhVc4UrLVZG1mjpgpnMXT4JtvwbCTVMdOE3MvApOuLAK32xsk9cURRAjMySizWK0ah8zsBYvAFILhxmeOUAziyB7IGqLumxabL+bq3XQNmYuHUPBtQQ3KJZeUHmawuJP0UYhOCqkZ+O8uPmCSXtB14F3T62ghCJe0XECqyXbQFO/zw+Z6V3MjfYRACPjGG/C15/1O47BbmXfbTN69Yy6zR+R6ni8ekMKZZ1/APlHEsqpUPsu9lqavzVMuFF/MmIEthflb1Kp0/pZyQwgCYwSVXuFIyVbjN10a5qSQVaImkKFzVOsKs22zuReBSVdC0Fqnzt3BIsjo6EIxBcj8XINPhJpdkc1oaa5VMZr0QjWuSOyd7GxRaaODT1CPu7IIfF1DoXY/NeMvpmsIQt/3OSQhiMJOZfuWqcVD/vjQjk8viE4mk6ZTtBCES5p30vZYBADD5qjbgcdB7gj/9yRnBK0mHpiWxMiCjA7PD591KTn3bmTJjL9xY9l5nPZmEv9ZWYbb7ROYNAJ/7cmZLPqqktQkK1+VN1DvtPoHi6VUQpDu4xoCryluTgpmIDFvFCCheod63CFGEJA+umM+PDYTqncG33HNfBxoEXiEwLB0zNVjJNNIm2ogZaByPUBk3A9HywCphAu8rjtfAv8OqTnq7+gMwQ/uydJK9T7nCHFPAnc3dQQQnerifcuVUHaWxhqI2VZdEzO0EISLWSnqGADZpd7ns0uVhTDp2ohcJi3Zxi8uGsdr351DYVYKd7+yngsfWax6FoGaOG94g5WuUdS1tHPnGaqh3J6jLn+LoOWo8h37WQR4fea1ZYDwTshmG4rKbeo2MEZgWgSmm2X3IlVV/eKVSgwguEUQ6OOuO6AmLDPPvmiSiqNEsrCs+YiRKRXwmQOob3Hy67c3UdsUQr8bMz5QOFGNN9gEFugaCqeWwHQN2X2EIDnExnPdVRaDEoLmmq4D/uHicsLWd2HtPFj5nMo+666fli/pBepvE6s9KzRaCMLGtAgGTfHPiBACblsIp9wd0ctNHjyA178zm4evmczRZifXPfUFX+yqVtc77jQ+3VaJ3Sr4+syhjMhPZ+eRNn8h8C0mg+AWQUaht+dLzghAQNVXvLB8L0eP1tBu83FTJKUBkj+9v07VPNTsUuesO0j7K7eoS1p8jofgMYK6g8jMQdzywmoWb69SLq2SEzwV2hGhuQZSs711DZ1YBB9sPMxzS/bw4hf7uj9n7R51mz2sc5dGoGsosGttV5iuIV8rLNTNaUJ1DUHkNqhxu+C121Qs643b4Z27AAnHnR76OczvZmNsW8kkMloIwsXMvvF1C/UyFovgksnFfPzDkynMdPD7D7Z62lR8urWCmcNzSE+2ccbYfHYfcakGcy4jp9y3mAyCCEGZ/74IdgdkD6Xl0Bbuf2czVmcj83c10dqugsubqtTtvz/fwqLtVXBktwqGX/E0lma14v1wR0CdQXKGWq36ukbqDtCQnM8nW8q597X1qqZi2Bw4vD70rRm7o/mI4RoyPnMntQSfb1cTtKegryuO7FHpjukFkFEQPFjs6c/k4xqCEC0CM2vI1zUU4uY0LqdKHe0qZdNTSxCC6HWHlKqr6KbX1P4ad6xRzeXu2e2NoYSCGe/S7qGYoYUgXNLz4cI/w4zbon7p1CQbd545ktX7avlkSwV7qxvZUdHAaaPViuqssQU0S8Mva2YOmTuTdWURBG6Qkzua2n2bkNJFumhh6xHJd/61mv+sLOO5lWrVNjjdzdOLdkLNHhg4nINFZ/Kb9m9SJ1N5an07TpdPhk6wzWnqDlAu1QS5/0gzzy7ZrQLV0h25NNKmGm8RHQR1DbndkiU7qhiYlsTe6ia+2N1NUPfIXtUWw2Lp3CJoqVPBXrPBmqfxXAhC4OzENRRq+mhX8QHwEYIIxAk+/R/VQmXOXXDy3So7a8AQb8fVUPEIgQ4YxwotBD1h+k0dC7yixNemlTA8N42HPtzGx5vVCuqMsWqSnzIkG1uSf3/3wwdVVtCSQ4KF2yposBguh+YjakUXRAhq00vJbt7HjVPVRH3y+FI+3VrB3a+spyhPPXf1pIFs3rEL2uphYC
n/WLaHf7rO5o1zlrClLon3Nvg0ZAvMhZcS6g6yszWTIQNTOWtcAY99uoOKrOOVf3tvBNxDUqrPmDqwS9fQ5kN1VDe28aOzR5GebOPllWUqkN2ZZXBkj3ILgRLXoMHio/4ptIEWgau987qGtoA6AggjWNzetVsIVLWvsBy7EGz/RG04M/WbcOavju1c5iJFWwQxQwtBH8NmtfDDs0exrbyeh+dv57i8NIbmqEnDahEMK1Srsa37K7np+S95+bNVuKTghpd28q3nvuTKJ1bgTspQk2JjFbhaWXkkjX8t3+tpefHuoUyShZPvjFNiMmXkYB762iSunTGY758zGYALRmcwyq5cKi0ZQ/n3F/s4b0IR188sZXhuGs8s3u11swT21G+qBlcb646mMX1YNj89fyxtLjcPflqm6hn2LDn2P1RrnXKRpWQrd5ctJahryHQLnTW2gIsmDaJhw3vwp7Gw4H+Dn7d2r7dtQnqhikMEFvC11vtnTqUMUJNvU5Wxq93N8NiJwYOjphAEWgSBrrVguNqCBor/8MFW3jeF2WqDjEHHLgT7vwQEnPeH0KqHuyJNC0Gs0ULQBzl/QhETijOpb2nn9DH5fq+NKVZuiFufW8LKPTWcOVjgThnIK9+dy8PXTGZ3VSMV7am0NVTTUr0HgL+vc/KzNzZy6oML+eNH23h1r5qEBhxZr06alMGV00r43eUTSU5VE1yGpY0rStVENm+7lfqWdm6aW4rFIrhpbinr9x9lhelmCdycxtiQZkdrFicMG8iw3DRumlPKK6v3U54zXfVxOtbN7s28/RTDTdFJ19XFOyoZU5hBfqaDaycP5BeWZ3ALq6qu3jHf/+DmI2pl7msRQMcgp9mC2sRiVddvqoY1/4LNb6jYwu5FHccdzDVkZmF15x4K4hoqq2nibwt38vM3N9LYasSNBkSglqB6h3IDBbY46Ql2h/qM2jUUM7QQ9EEsFsF9543FZhGcf3yR32ujilVQ+IbphXx+z+mMy2zFnlnI1CHZXDK5mCe/MZ0qVyprv9rNg/+nJrqzZ03nhZtnMGiAg0c+3UGlw1jxHjC2jAhMHwVoa+SMggbcUvC75c1MHjyAaUNV/OGKqSVkp9p5evFudayZqWI06jMzVg7JHKYb7/ne6SPIS0/md5uNPX6PtZ7AnPTNmEjKgA7umOY2F1/uPsJJI5V4Hv/VY5SIKn6T+WvIG6OyYep8XFxm6mi28fcx3YOBAePWuo7V1am5qj/V+z+BoXNVVtDmNzuO29yUxuLz0/RsTtOdELR3yN1/a52q16hqaOMfy/aoJyNRXVy9w8gwixC6ujimaCHoo8wZkcv6X53NlCHZfs87UtRK8rZZg8hKtRvtJbwb+5wyKo9BhUXYWmtJaVaTxFVnzOSkkXm8+p3Z/PuWE3nkptOVuX7A2DXMr6DMWKm2NZLdeoAjtjzasHPzXG9NRUqSletnDuWTLeXsqKhXE+eQ2ap1hZQei6DJkc9xeercmQ47T31jOp81l9KOlfZdxxgnMNpLNNkyWbCtAunI6iAEK/bU0OZyM3dkHhxci/jicbYMuoLny4ezfvZf1er81Vu8GVhms7lAiyBwJRvoGgIVJzi0VvnwL38SRp0LW9/xntvEd79iE/NcR3Z3/ZmDWARvrzvItKHZnDY6jyc+20Vdi9MQggM9b7khpaoZibgQaIsgVmgh6MOkJgWp3LQmq1uzuMi3vYTBwNwCJgx0850pySq7xVg1CyGYPSKXyYMHQN5ob7vl5IDKYlAVsDW7SSkYwU1zSjl3gn/w/Fuzh5GWZOOB97eqJ6Z9U3U43bNY1RxgZfjQYVgsXv/ypMED+N3Vs1jvLmXv6o/8K6m7Yd6Kffz4P+u8cQlj0n96VS03PvclayoF7QFtHj7/qpIkm4UZQzLh7TshNZeiKx5gUJaDK16pYtHI+1TgetEf1BvMv4cnRqCyXZ55fxmPfrrde+JA1xBAmhEwvvivqm/UuEuUq2hvQDzEd1Mak8EzlFX16i1eKy0YbqefEGw7XM/Ww/VcPGkQPzxrNEebnTy7eLfPBjXBV+A7KupVbUdnNFSoJIGc4zo/Jly6qy4u39z1Z9ccE1oI+hs2Qwg86aOV3pWrSUo2SW1HSWs+pCaFYMG+3FHe+4FN50C5MGp2kVo4kl9cNA671f+rlJOezPdOG8EnWypYuqNKTXyOLFj9D1pryjgss5lWmksg504oxFo6l8HNW3nik40hfeTWdhcPfbiNV1btZ/U+Y9VvTPqvbm5iTGEGuxvtVFeWs3BbhUcsFu+o4oRh2aTs/1yt1s++nwE5+bx7x0mcMiqPb6w+jiVpZyM/+4Py5x/Zq2IOhttHGrUZRysP8PjCnTSYPvhgrbin3wzn/A7GXUJ1Qytb0k9UIhzoHmpr8O8zBCrz6VvvqnP+89LO02vNOgKDt9YdwGq4D48vyeKc8QU88/luGhyGaAeJEyzeXsUljy7hm8+tYGdlJ11ma4wK8ogKQYFql94Zb9+pXHWaXkELQX/DIwRtsOEV5WrIG+1/jBk4rS3rWENg4vseX4vAlgIIlTbZVKVaPHfCjXOGUTwghfvf3YLL6oCJV8Pmt2g7sJ6DMocThmUHfd/EOReQJFx88fn7HDra7Pea2y1xBVgK76w7RHVjGzaL4LklhvvEiBHsb3Vw/6UTOHXyKDJp5FvPfcmJv53PnS+tYevhek4amaeylCw2GKu2xshOS+Kpb0znVxeN43u117FPDML5n5tVjMOMDwAvrjxMjUxnak4bjW0u3lx7QLl6nE0dXUPHnQazvktZTROXPLaEi/6+itqS02DL2/57CDuDWASgrnvj+6ow8IXLvO08fPFxDUkpeWvdQeaMyCUvQ30n/uusUTS0tfPiFsMlFBAneGvdQW58fgUl2amk2K1eay4Qsw9VRF1D+crKCNb6wtmihLp6R2iFdZqw0ULQ3zCFoGYnvPNDtQfCpOv8j3EMUAHZqq9UBkkw/CwCHyGwWJRVUG6s1rM7FwKH3cq9541h86E6Xl29X+Wcu1rJOLqNCnKYUJwV9H1iyEyksHCBWMofP9zmeb6prZ3LH1/KdU8tp90oWJNS8vzSPYzIT+ebs4fx/sbDSjyaa2gUqRxXkMW0odnk5BSQQgsPXTaWGaUD+Xx7FRYBZ4zJV90yiyb55e4LIfjWnFKe+/ap/Lf1v3A3HlF7UBhuobVltfzm7c00JuVySpGLsUWZ/Gv5PmRrQJ8hH3ZXNXLVE8uob2lnYFoSDx8apwr+zG0dwXANpfFVeT03PPMFv3prE2+uPcCB2mblUvr6f1SX2x2fdPzDuZyeOoI1ZbWU1TRz8SRv19oxhZlcO2MIj642WpD4WATPLt7NHfPWMGVINi/fPovvnHocH28uZ/muIEVw1TuU4GR1/O6U17Xw3RdXccljSzjtoYXM/O18fvLKejYe6KYOoquiskPrvK7Ow+u7Po+mR2gh6G+Ym4HP/42a7C9/omMXSDOTxtnUuUXgEQLhX9wE6vHhDer+wOFdDufCiUVMGTKAhz7cxpq2YtyDpgHgTi8i2WYN/iZHJ
mLajXzNsoATN/yczWWVuN2SH728jrVltXyxu4ZHF6hV6ep9R9hw4Cjfmj2Mb80ehpSSF5btpba6nGpXGtfOGIIQwlNdfOX4dB69biorf3omK392FiMH2pXvuZNNVKYMyeZPd9zA8xmqj9IzmyWjfvY+lz62hLyMZAoHDUU0VnD9zCFsOVTHxl37PZ/Bl/X7a7n6iWW0truZd+tM/nTVZP6vdgxtItnfPWTsV/zQh9v4YncN//dlGXe+tJZT/rBATcoDh6sMpGAToo8QvLX2IEk2C+eM99/i9JcXjWPkkEHUyVSqD+7E5Zb86q1N/OadzZwzvoB/3jSDrBQ7N80ppSjLwW/f29IxVlO9U43D4v//53S5+d6Lq1mwtZKsFDsTirOYPiybt9Yd5MJHFnPF40tZW+YfsK9rcfKjl9fx2UHDPRlMCPav8N4/tK7j65pjRgtBf8MMFrY1wLm/Cz5Rp/i4ZIKs6gC1T0BShrIGAmMIST57AHfhGgK1sv7FheOobXZy2d+W8sv9SgjS8oZ0/Tku+CMtc+7ma9ZFuP91JY9/sIr3Nx7mZxeM5bIpxTzy6Q7W7DvCc0v2kOGwcfnUYgYbVcrzVuzj0OGD1IkMLp9S4v+ZDZeRxSIYmJakVvmuNrWrXCcUZDr45h338+mI+6gffwM3zSnlv84cxb9vPRF7ViHUl3PJ5GLSkqx8uNrYdzk5k8bWduat2Mcljy3h4keX4KXOZoEAABivSURBVJbw0m0zGTcok7kjc7lmzljmt0+kdcMb3gyetibq3El8tLmc2085jg2/Opt375hLXkYyD364DQlQNBEOb6DFqWIj+6qN2gMjWNzY2s7b6w5yxph8Mhz+BWbJNit/v2EaFZY8Nm/ZxLdfWMXzS/dw05xS/vb1aTjsanJPSbLy47NHs37/Ud5ef9D/D9JJ6uiDH25j5d4jPHDF8fzzphk8cu0UHr1uKsv/+wx+fuE4DtY2c9UTy3httRLLA7XNXPn4Ul5dvZ8HPjfSfYMFjMu+UJlaGUVaCHqJEBuGa/oMpktizIUw5Ybgx/gJQScWgRCQO1JtwhKIaSGk5QV1gQQyZUg2S35yOit217B6Rz6LvtrO0BMv7fpNQuA462d8XpvJiRt/xYAvriZ51D3cPPd86lvbWbG7hh/MW8Ohoy3cNGeYJ4PqxjmlfLipnNb2KlIyclQKLXTeb2jvUnXbzbaKjiQbp19/Lx16ahr57+lJVi6bWszqlR+DDT7e1cS9ry6gurGNUQXp/PzCcVw+pZjsNG9Wzz3njubRzbM4r/lLKnevI++4KeBsYnNlO6lJVm6cPQyb1cL4QVl8//QR/PT1jSz8qpLTCo+H5Y/zx/c38dTSMpbvqublb8/CYlQWP7ZgB9WNbdxyUnCRzs9w4Cg5jrZ9u5i/tZxfXjSOG+d0PPayKcU8s3g3f/hgGxdOHITVIlQ8o2aX/y58wIebDvPkol1cP3MIl0z2300vK0WlF182pZjvvriKH768ji/31PDJlgpanC6ev/EE3l2aBHth8drNzB13sffNUkLZCig9RaXlHlzb5f9TOCzdWcXTn+/mVxeNZ0hOkLhMAhF1i0AIMVgIsUAIsUUIsUkIcWe0x9CvScuF6/4Dlz7eeel/KEIAMPwUKJjQ8XkzZtBFfCCQvIxkLphYxM8vP4GT732DEeMmh/S+mZd/n3syHgB7Krfsuwfx2q1kuo7yp6smcaC2GbeUfGPWMM/xJ5YOZGxRJlk0kJfvk9LqCGi2Z7JvGeSNDb9Rmkl6gWcT++tnDiXFrTJtHllSwciCdF65fRYf3nUyN88t9RMBUDGUyy+7GoD/e/VlGlvbcbc2sLXGxddPHOJ3/NemDWbwwBT++NE2ZMHx4Gpj8fIljC3KZOXeI2qbUpeTRpeFpz/fzeVTipk2tPPPlFlQyihHLS/dOjOoCICymn5w+ggO1DazaLuR0XO0TFlQOSPYW93IO+sP8vsPtvLj/6xjYkkWP79wXKfXHJiWxAs3n8gNM4cyb0UZSVYLr35nNqeOzue315+KGwurNm/luqeWc9dLa/jVW5t4f/EKZSUMngFFk5BVX/HQO6v5n3c2d98pthOa2tr55Zsbue6pL/h0awXzvoxAJ9YwKa9r4aUV+3r8GSJNLCyCduBHUsrVQogMYJUQ4mMp5eYYjKV/Mursrl/3CIFQfWc648xfBX/etAi6cQtFArvVwu/vuhm7+xuw9C+q9UPNLk68ZT6/uWQCtY1tDB7oXc0JIfj5BWMpnNeEI9dHCEyLwLffkNulVpvHX9nzAXqqi8sZUziamYPsUAX3XjaDWSecqOITXTB85HhaUwoYWr+GO+et5om2JlpI5paT/F16STYLd5w+krtfWc8nR/I5Czgp/RB33n4rN//jS373/hauym5j4+EmkmwW7j1vTNfjzirB1lrLicXJXR52xtgCBqapHfJOG53vyRhaVJ3JN15eCIDNIji+JIu/XjOl87iPgd1q4X8uncB5EwoZXZhBTrq6vt1uR6bncZLDzaet7aypbaamsY2atkWclwQvHiwktRkuQ7J0yWeslqMozHRw68ldx6gCOXy0hWueXMae6iZunDOMzQfr+GDjYe45Z3S3/1eR5MlFu3hm8W5cUvL1E4d2/4ZeJuoWgZTykJRytXG/HtgCFHf9Lk1EMSfFjCLvhjTh4BGC8H6EPSXZZsWS5IBT74UL/qiCuzvnc8PMofzA2JnNl9nDs0lxNSB8LZ/A9tugMp9a61TVc08J6Jx56wxVGzF73PDQJhYhSB5xMmek7mDR1oNYcVFaXEBBZscePpdNKWZ4bhrf+aCOZpnETSMaSEu28b+XHU+r0011bQP769q544wR5Ad5vx9mbKiu6w1qkmwWLptSzMeby6lpbIPqXQA8tMrFhOJM3vnBXDb95hxe/+4cP0Hujtkjcj0i4PlTpOczdWAbb35/Lp/dfRrrf3k29044Sotw8IvlkgfWqc/0xBkWzptQyAMfbGXV3hD3gjZ4eP52Dta2MO/WmfzyovFcNGkQu6sa2V7RSc1ED3hz7QEeM5IZOuOzr5SFdf87W9hTFcHd4npITIPFQohhwBSgQ4WMEOI2IcRKIcTKykq9c1FEsaeoeoCu3EJd0QPXUMSYdK2axD57sPNW0S1HAenv7jELvHxjBGZ8YGjX8YEuMdMe640gZxfpo50ydDaprZX8ZLJq4jdjZPD/F5vVwl1njaJdWqhJH0lhkwpMH5eXzvdOG4F0O3E4HHxrdgj/L559CbrpOdTexrXjknG6JG+sOQDVO3Da0lhfm8wPzxrFhOKsbq2AkAnoNySEYFD9BhxDT+DTu8/gpR9dBml55DVs4/dXTqR4QArf//caJVAhUFbTxH9WlnHNjMHMOk5Vep89rgAh4MONQdqJ94CdlQ3c/Z/1PPjhNhZsDd4yY/+RJnZUNHDL3FJsVsGP/rOuQ21MtImZEAgh0oFXgbuklB2qRKSUT0opp0spp+fl5XU8gebYSMvzK44KC7MXThRcQx2wJcGcO6FseefbWnoazvkIgcUKyVn+FsHepZA1
pOeCCB1312qpU62gbWF05Rw6F4CbC9QqMnvAgE4PvWhiEa9/dzZFo09QKaSGGN5+6nAy7JITjysgyRbCz9r8zLXdCMH8XzNi3kmcVdTCyyvLcFfvYKe7kOOLB3g2RIoYgf2G2hpVmvLgExmak0ZpXrqq9zi0jkyHnb99fSrVDW3cMW8NTW3tnZ/X4JFPt2OxCL57qjfjKT/TwdQh2XywySsEVQ2tnPuXRapAMAzcbsl9r23AYbcwPC+Nn73h0/HVh4Xb1ML2mhlD+J9LJrBq7xGeWBSkQDCKxEQIhBB2lAi8KKV8LRZjSHi+9hyc/vOevTfKrqEOTLlBTRqLHgz+uqcFdUDlcsoAb4xAShUoPhZrAJSl4buJfWu9qiEIx9+cO1IJ8/aP1OPAug0fhBBMGZKNpWiisnyMFX2yzUqa1U1uVoiWSEYRCGvX7aidzbDmBXA28kvb82w9XMfRsi1scxZwxxkjI+9TN/sNmZbewTWqFmbwDO8xRZOhYgs4m5lQnMX9l01gyc4qrnx8mSq664S91Y28uvoA180YQmGWv0ifO76QTQfrKKtRabi/fnszWw/Xc/+7W2huc/kdu6uygScX7eT2F1Yx63fzueGZLzzpuy99WcaK3TX89IKx/OGKiRyobeahj7YRyMJtlZRkp3BcXhqXTB7E+ccX8qePvuK/X9/gGUO0iUXWkACeAbZIKf8U7etrDEqm99wiGHk2TP2Gd+etaGN3wOw7YPdnKtgbiNF5tEMmUMoAr0VQvVP1YeombbRbhFB/x81vQOVXyjUUjlvIPMfQ2d423YHdR4NROFHdmoV9YBSUhZj/YbWpWpG9S+DjX8JzF6h+Pr5sflOJzdiLKKlcxBX2ZWS1HqIhbShnjo2wNQBK3N1O7/+R2Yq8xGf/46JJShzKVW7JVdMH8+w3T6CspomLH1nMh5sOs3RHFR9sPMS76w9RVtOElJK/zt+BzSL47qkd+yOdM14F/D/cdJj5W8p5e91BzhxbQGV9Ky8s3+M5bndVIxc+spjfvreVzYfqmDokmzX7ajn34UU8vnAnv3t/CzOHD+Sq6YOZPmwg188cwvNL9/gV0bW2u1i6s4pTR+chhEAIwQNXTOSqEwargPxDC7nnlXVRF4RYZA3NAW4ANgghzKTg/5ZSvheDsWh6wrA56l8smX6j2irx3R/BZU9AgU/aYuBeBCYp2d4YwZdPA0KlyB4rlzwG866Fp89UXUYD+wyFwtA53grjLiwCDwXj1a5nhzfAmAvUc6HsWezLwOFKTPevVK1G9i5W9Scjz1Kvr/qHOubK5+Cp0/ht+dNYpGTCxGm9k2Hj29Y7daDqAZUz0l/Qiyap20NroUQVJ542Jp/XvzeHW/+5km+/0LFDaUFmMpX1rdw4pzRoEH1ITirjijJ5Y+0BqhtU3cffvj6VW/65kscX7uS6E4eSZLVwx7w1JNksvHfHSQzLVf9HB2qbuffV9fz+g60k2Sz87vKJnr/NPeeO4ePN5fzklfW8+f05OOxWVu45QlObi1NGeYU002Hnt5cdzw9OH8ETn+1i3op9vLb6AFefMJgfnD6ygwXTG0RdCKSUi4Ho5Wlp+idJaXDRX+CtH8Df56g+RnP/S/m+O3MNOQaoPvwVW2DFk0pMIuHeGjwDbv0U5l0DFZth2Enhn2Ooj7CGIgRJqaq695DRasLtVivlcITgsr+rv0fh8UpU/jYTPvxvGH6qKhrbtxTO/LVqW3HhwyQ9fQYAxx8/NfRrhINvvOVoGeycD6fc63/MgCHq//XQWuW6qt0HjixG5Bfy1vfn8OWeGlKTbGQ67LjckjVlR1i55wiH61q4/ZTOu6WeO6GQP338FULAo9fNJslm4UdnjeKSx5bw3OLdNLS28//t3XtwVNUdwPHvLwlEngaEAIISQBQJIC+VpwpUBWyFKpZaVEQcxxZHrHZ8tLa2MjJqtVU7VLAigiA44gOl1ioRocxUJCAqBZGnEgQBFWy0oOCvf/zOyibkASHLwr2/z8xO9t69u3vOns357T33PN7fvItJV3b7PggANM+pxbRrzmLO8k+om51Fq6TH6h9Xg3sv6cSoJ5cw/pVV3D2kA2+u3kbNzAx6tTnwbLrZ8bX4/cX5XH9uGybMX8vqwnl8snwU6380iV7dux3eZ1sJH1nsjl3th1ilu+A++4W/dIpVaJnZgBw4FXRi1tV/3GbNN/3urL60NGgJo1+zif6a5B/683PbW6DavfPgmobAKvBNS+x+YlK2jEP4l65/ot0SLrzHglnhE1bBZtSAziPssRbdkDOvhWXTyGhcjbOOJksEgs/WwMIHbZW4vjeXPEbEzgremWELHYFdmL9qDvVO7kH/diXnVurY4vgSAw7LMygEgpE9875fae+Mk3I4v30T/vrmOv73rQ3ySzQjlUySMLRL2T3g+7XL5do+rXh80QZ6tWnEgg+30yuvDnW+LoJN62zt502L7aL9KQOgwzCaNmzNOJkIWU+xs0YumfVT373UA4E7ttVuCIPug7Oug3VvWLNC8Vb75VhqUjRq5djU2RsWwOAH9i8WU12y68Glf6vaczMy7DrB6lfKnoa6LE07wornrPdTwd22L6eSOZwqcupAm8ph/nircNsNLrG6HQPvhZ5jDgyw1SXRNFQwzq61DJ++fzbdZL3H2plfTku7Lbwfnh5ugbj0lOtlKd5uI/CTmrfaNqnHyzf0oV2zktd3bj7/VF5f+Sltc+vyu/zP4KFOFnh6jrFp1TMy7SL/u7MsTRfcU3KZUeDWs2txwqp/UfvZPzKLNTTcVQwPhwclA3Lz7YdE4RRYPNEu4otArxvJOfe2ktPAp4gHAhcNJ7SpfKGU40K3zNx86DYq9Wk6VK37wZrX96ezMk072t8pg6yr7MV/gQ6HMUpaBC4cD5P6gn4H3a4u+XhmVmq7DGfXt0p2907rDNCinOaQNv3tlnDy2TD5Aph+qS3gs/U9WD4TdqyGy6ZC06RpUv49wZq/GrSC/KHQ9kI7S9y2ko5fboasa/Z/rsDpzeoz7equdFk/keynH7bvWFY2vHwjvDHOejh9vcO6Ju/ZZZ/bwHvts9xTDHPGUHPli/wc+Jgm/HPfmZzfqzuNmuXZeJgTu+yfqXb3Lvjg79ZpoNuokte9UkyOlrkuKtK9e3ctLCxMdzLcse792fDcaBg5F1pVoR0/1fbttbb5xqdWfixYBfZYP2h1jk0HUtX5kkp79dewcSFct/CAX7cp90hXq0SvX2QDHw/WlndhymCbdResmUnVrpuMnGuV6jvTYc4YC7hgq85pUvfQzGyr5C+fCXk2toPtq+05RUugyxUw6H5rutuw0K4zZWTZIMdTBljvq7cm2HWNTj+BWSMsGPX9FXQaTsH2esxb9Snjf9zxiE1nISJLVbV7pcd5IHCxsW+vLUDfKEVt3FGhemjjIKrT1hXWhFeVQX4bF1kTTfshVtnv/AievMi61fYeC/Pusgvhl8+yCv+rsGZ0vabWpLSn2FZ/+2KjreOxY42NValZx5oSK5uTShXm3ADLp1uwyMq2Hldt+lXhg6geHgicc27
HWgsGxVtttb6rXqy4V9bXn8OMy2BzqG/yL7FrUKXX/S7Pvr0w5xc2TmXYZFtHIY08EDjnHFgwWDoF+t5ycM1n33xlF8zz+sBpg1KfvhTyQOCcczF3sIHAl6p0zrmY80DgnHMx54HAOedizgOBc87FnAcC55yLOQ8EzjkXcx4InHMu5jwQOOdczB0TA8pEZDvwURWf3gjYUY3JOVbEMd9xzDPEM99xzDMcer5bqmrjyg46JgLB4RCRwoMZWRc1ccx3HPMM8cx3HPMMqcu3Nw0551zMeSBwzrmYi0MgeCzdCUiTOOY7jnmGeOY7jnmGFOU78tcInHPOVSwOZwTOOecq4IHAOediLtKBQEQGishqEVkrIrenOz2pICInich8EVklIv8RkbFhf0MReV1E1oS/DdKd1uomIpki8o6IzA3brURkccjzMyJSM91prG4ikiMis0Xkg1DmPaNe1iLyy/DdXiEiM0XkuCiWtYg8ISLbRGRF0r4yy1bMI6Fue09Euh7Oe0c2EIhIJjABGAS0By4XkfbpTVVK7AVuUdXTgR7AmJDP24ECVW0LFITtqBkLrEravg/4c8jzF8DotKQqtR4GXlXVdsAZWP4jW9Yi0hy4Eeiuqh2ATOCnRLOsnwQGltpXXtkOAtqG23XAo4fzxpENBMBZwFpVXa+q3wCzgCFpTlO1U9Utqros3P8vVjE0x/I6NRw2FRianhSmhoi0AC4CHg/bAvQHZodDopjn+sA5wGQAVf1GVXcS8bIGsoBaIpIF1Aa2EMGyVtWFwOeldpdXtkOAaWreAnJEpFlV3zvKgaA5sClpuyjsiywRyQO6AIuBJqq6BSxYALnpS1lKPATcCnwXtk8Adqrq3rAdxfJuDWwHpoQmscdFpA4RLmtV3Qw8AHyMBYBdwFKiX9YJ5ZVttdZvUQ4EUsa+yPaVFZG6wHPATar6ZbrTk0oi8kNgm6ouTd5dxqFRK+8soCvwqKp2Ab4iQs1AZQlt4kOAVsCJQB2sWaS0qJV1Zar1+x7lQFAEnJS03QL4JE1pSSkRqYEFgRmq+nzY/WniVDH83Zau9KVAb+BiEdmINfn1x84QckLzAUSzvIuAIlVdHLZnY4EhymX9A2CDqm5X1W+B54FeRL+sE8or22qt36IcCJYAbUPvgprYBaaX0pymahfaxicDq1T1T0kPvQSMDPdHAnOOdNpSRVXvUNUWqpqHlesbqjoCmA8MC4dFKs8AqroV2CQip4VdA4CVRLissSahHiJSO3zXE3mOdFknKa9sXwKuCr2HegC7Ek1IVaKqkb0Bg4EPgXXAb9KdnhTlsQ92SvgesDzcBmNt5gXAmvC3YbrTmqL8nwfMDfdbA28Da4Fngex0py8F+e0MFIbyfhFoEPWyBv4AfACsAJ4CsqNY1sBM7DrIt9gv/tHllS3WNDQh1G3vY72qqvzePsWEc87FXJSbhpxzzh0EDwTOORdzHgiccy7mPBA451zMeSBwzrmY80DgXIqJyHmJGVKdOxp5IHDOuZjzQOBcICJXiMjbIrJcRCaF9Q6KReRBEVkmIgUi0jgc21lE3gpzwb+QNE/8KSIyT0TeDc9pE16+btI6AjPCKFnnjgoeCJwDROR0YDjQW1U7A/uAEdgkZ8tUtSuwALgrPGUacJuqdsJGdib2zwAmqOoZ2Jw4iWH/XYCbsLUxWmPzJTl3VMiq/BDnYmEA0A1YEn6s18Im+PoOeCYcMx14XkSOB3JUdUHYPxV4VkTqAc1V9QUAVd0NEF7vbVUtCtvLgTxgUeqz5VzlPBA4ZwSYqqp3lNgp8ttSx1U0J0tFzT17ku7vw//33FHEm4acMwXAMBHJhe/Xim2J/Y8kZrn8GbBIVXcBX4hI37D/SmCB2joQRSIyNLxGtojUPqK5cK4K/FeJc4CqrhSRO4HXRCQDmwFyDLb4S76ILMVWxxoenjISmBgq+vXAqLD/SmCSiNwdXuOyI5gN56rEZx91rgIiUqyqddOdDudSyZuGnHMu5vyMwDnnYs7PCJxzLuY8EDjnXMx5IHDOuZjzQOCcczHngcA552Lu/77AmefMQ0nPAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "#Graficar aprendizaje\n", + "history_path = config['train']['saved_weights_name'].split('.')[0] + '_history'\n", + "\n", + "hist_load = np.load(history_path + '.npy',allow_pickle=True).item()\n", + "\n", + "print(hist_load.keys())\n", + "\n", + "# summarize history for loss\n", + "plt.plot(hist_load['loss'])\n", + "plt.plot(hist_load['val_loss'])\n", + "plt.title('model loss')\n", + "plt.ylabel('loss')\n", + "plt.xlabel('epoch')\n", + "plt.legend(['train', 'test'], loc='upper left')\n", + "plt.show()\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Evaluación del Modelo" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of images in the evaluation dataset: 1\n", + "\n", + "\n", + "\n", + " 0%| | 0/1 [00:00 0 for x in total_instances)))\n", + "\n", + " for i in range(1, len(average_precisions)):\n", + " print(\"{:<14}{:<6}{}\".format(classes[i], 'AP', round(average_precisions[i], 3)))\n", + " print()\n", + " print(\"{:<14}{:<6}{}\".format('','mAP', round(mean_average_precision, 3)))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Cargar nuevamente el modelo desde los pesos.\n", + "Predicción" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Training on: \t{'panel': 1}\n", + "\n", + "Tiempo Total: 1.293\n", + "Tiempo promedio por imagen: 0.259\n", + "OK\n" + ] + } + ], + "source": [ + "from imageio import imread\n", + "from keras.preprocessing import image\n", + "import time\n", + "\n", + "config_path = 'config_7_panel.json'\n", + "input_path = 'panel/Mision_2/'\n", + "output_path = 'result_ssd7_panel_2/'\n", + "\n", + "with open(config_path) as config_buffer:\n", + " config = json.loads(config_buffer.read())\n", + "\n", + "makedirs(output_path)\n", + "###############################\n", + "# Parse the annotations\n", + "###############################\n", + "score_threshold = 0.3\n", + "labels = config['model']['labels']\n", + "categories = {}\n", + "#categories = {\"Razor\": 1, \"Gun\": 2, \"Knife\": 3, \"Shuriken\": 4} #la categoría 0 es la background\n", + "for i in range(len(labels)): categories[labels[i]] = i+1\n", + "print('\\nTraining on: \\t' + str(categories) + '\\n')\n", + "\n", + "img_height = config['model']['input'] # Height of the model input images\n", + "img_width = config['model']['input'] # Width of the model input images\n", + "img_channels = 3 # Number of color channels of the model input images\n", + "n_classes = len(labels) # Number of positive classes, e.g. 
20 for Pascal VOC, 80 for MS COCO\n", + "classes = ['background'] + labels\n", + "\n", + "model_mode = 'training'\n", + "# TODO: Set the path to the `.h5` file of the model to be loaded.\n", + "model_path = config['train']['saved_weights_name']\n", + "\n", + "# We need to create an SSDLoss object in order to pass that to the model loader.\n", + "ssd_loss = SSDLoss(neg_pos_ratio=3, alpha=1.0)\n", + "\n", + "K.clear_session() # Clear previous models from memory.\n", + "\n", + "model = load_model(model_path, custom_objects={'AnchorBoxes': AnchorBoxes,\n", + " 'L2Normalization': L2Normalization,\n", + " 'DecodeDetections': DecodeDetections,\n", + " 'compute_loss': ssd_loss.compute_loss})\n", + "\n", + "\n", + "\n", + "\n", + "image_paths = []\n", + "\n", + "if os.path.isdir(input_path):\n", + " for inp_file in os.listdir(input_path):\n", + " image_paths += [input_path + inp_file]\n", + "else:\n", + " image_paths += [input_path]\n", + "\n", + "image_paths = [inp_file for inp_file in image_paths if (inp_file[-4:] in ['.jpg', '.png', 'JPEG'])]\n", + "times = []\n", + "\n", + "\n", + "for img_path in image_paths:\n", + " orig_images = [] # Store the images here.\n", + " input_images = [] # Store resized versions of the images here.\n", + " #print(img_path)\n", + "\n", + " # preprocess image for network\n", + " orig_images.append(imread(img_path))\n", + " img = image.load_img(img_path, target_size=(img_height, img_width))\n", + " img = image.img_to_array(img)\n", + " input_images.append(img)\n", + " input_images = np.array(input_images)\n", + " # process image\n", + " start = time.time()\n", + " y_pred = model.predict(input_images)\n", + " y_pred_decoded = decode_detections(y_pred,\n", + " confidence_thresh=score_threshold,\n", + " iou_threshold=score_threshold,\n", + " top_k=200,\n", + " normalize_coords=True,\n", + " img_height=img_height,\n", + " img_width=img_width)\n", + "\n", + "\n", + " #print(\"processing time: \", time.time() - start)\n", + " times.append(time.time() - start)\n", + " # correct for image scale\n", + "\n", + " # visualize detections\n", + " # Set the colors for the bounding boxes\n", + " colors = plt.cm.brg(np.linspace(0, 1, 21)).tolist()\n", + "\n", + " plt.figure(figsize=(20,12))\n", + " plt.imshow(orig_images[0],cmap = 'gray')\n", + "\n", + " current_axis = plt.gca()\n", + " #print(y_pred)\n", + " for box in y_pred_decoded[0]:\n", + " # Transform the predicted bounding boxes for the 300x300 image to the original image dimensions.\n", + "\n", + " xmin = box[2] * orig_images[0].shape[1] / img_width\n", + " ymin = box[3] * orig_images[0].shape[0] / img_height\n", + " xmax = box[4] * orig_images[0].shape[1] / img_width\n", + " ymax = box[5] * orig_images[0].shape[0] / img_height\n", + "\n", + " color = colors[int(box[0])]\n", + " label = '{}: {:.2f}'.format(classes[int(box[0])], box[1])\n", + " current_axis.add_patch(plt.Rectangle((xmin, ymin), xmax-xmin, ymax-ymin, color=color, fill=False, linewidth=2))\n", + " current_axis.text(xmin, ymin, label, size='x-large', color='white', bbox={'facecolor':color, 'alpha':1.0})\n", + "\n", + " #plt.figure(figsize=(15, 15))\n", + " #plt.axis('off')\n", + " save_path = output_path + img_path.split('/')[-1]\n", + " plt.savefig(save_path)\n", + " plt.close()\n", + " \n", + "file = open(output_path + 'time.txt','w')\n", + "\n", + "file.write('Tiempo promedio:' + str(np.mean(times)))\n", + "\n", + "file.close()\n", + "print('Tiempo Total: {:.3f}'.format(np.sum(times)))\n", + "print('Tiempo promedio por imagen: 
{:.3f}'.format(np.mean(times)))\n", + "print('OK')" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Processing image set 'training_small.txt': 100%|██████████| 45/45 [00:01<00:00, 27.96it/s]\n", + "trophozoite : 135\n", + "red blood cell : 3195\n" + ] + } + ], + "source": [ + "\n", + "# Summary instance training\n", + "category_train_list = []\n", + "for image_label in train_dataset.labels:\n", + " category_train_list += [i[0] for i in train_dataset.labels[0]]\n", + "summary_category_training = {train_dataset.classes[i]: category_train_list.count(i) for i in list(set(category_train_list))}\n", + "for i in summary_category_training.keys():\n", + " print(i, ': {:.0f}'.format(summary_category_training[i]))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "45" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.8" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/AI Test.ipynb b/AI Test.ipynb new file mode 100644 index 0000000..db22bdd --- /dev/null +++ b/AI Test.ipynb @@ -0,0 +1,102 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Prueba de conocimientos de Aprendizaje Automatizado" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Contexto\n", + "\n", + "Esta prueba se centra en los aspectos fundamentales requeridos para la vacante disponible actualmente. la seleccion del dataset es en principio una aproximacion de los requerimientos actuales, ya que mas adelante iran aumentando en complejidad los casos a resolver, con esto se busca evaluar cada una de las habilidades y destrezas que la organizacion requiere actualmente, cualquier duda o consulta puede ser canalizada por correo electronico." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Descripción\n", + "\n", + "Prueba de conocimientos generales de Aprendizaje Automatizado (Machine Learning/Deep Learning).\n", + "\n", + "### Plazo\n", + "\n", + "1 semana a partir de la recepción de la misma.\n", + "\n", + "### Metodología\n", + "\n", + "* Sera necesario clonar este repositorio en su cuenta de [**GitHub**](https://github.com) y agregarnos como colaboradores para asi poder ver los avances y hacer revisiones.\n", + "* El modelo que se desarrolle debe estar documentado de forma procedimental en un jupyter notebook como este, puede hacer uso del mismo para la prueba o crear uno nuevo si asi gusta.\n", + "* Para agilizar el proceso de evaluacion debe seguir los pasos sugeridos en el apartado **Fuentes de datos.**\n", + "* Evaluar el rendimiento del modelo desarrollado y hacer predicciones con el mismo con imagenes que no esten contenidas dentro del dataset.\n", + "* El modelo debe ser capaz de clasificar una imagen de prueba, asignandole una de las clases previamente definida con su grado de confianza, y resaltando la region de interes con un bounding box." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Dataset\n", + "\n", + "El dataset esta disponible en el siguiente enlace [dataset](https://drive.google.com/open?id=1iBrQQaKT7SGKP77FWvt_zqJEoAvs0vgh)\n", + "\n", + "Las imágenes estan en formato .png o .jpg. Hay tres conjuntos de imagenes de un total de 1364.\n", + "\n", + "### Etiquetas\n", + "\n", + "Los datos consisten en dos clases de células no infectadas (eritrocitos y leucocitos) y cuatro clases de celulas infectadas (gametocitos, anillos, trofozoitos y esquizontes).\n", + "\n", + "Se proporciono una etiqueta de clase y un conjunto de coordenadas de cuadro delimitador para cada celda. Para todos los conjuntos de datos." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Fuentes de datos\n", + "\n", + "Para agilizar el proceso de evaluacion, es necesario informar el estado actual de la asignacion enfoncandose en los siguientes aspectos.\n", + "\n", + "- Estadísticas\n", + " * Estadísticas de resumen para cada atributo en los datos de entrada y salida.\n", + " * Mostrar en gráficos para ayudar a comprender los datos rápidamente e identificar irregularidades o errores.\n", + " \n", + "- Estado\n", + " * Indica el estado actual del origen de datos, como, In Progress (En curso), Completed (Completado) o Failed (Error)." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "*Happy Coding*" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.3" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/Flit2jpg.py b/Flit2jpg.py new file mode 100644 index 0000000..36865b6 --- /dev/null +++ b/Flit2jpg.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Created on Thu Jan 9 18:27:19 2020 + +@author: dlsaavedra +""" + +#! 
/usr/bin/env python + +import argparse +import os +import numpy as np +import errno +import flirimageextractor +import matplotlib.pyplot as plt + +argparser = argparse.ArgumentParser( + description = 'Change flirt image to jpg image') + + + +argparser.add_argument( + '-i', + '--input', + help='path to an folder of image') + + +def mkdir(filename): + if not os.path.exists(os.path.dirname(filename)): + try: + os.makedirs(os.path.dirname(filename)) + except OSError as exc: # Guard against race condition + if exc.errno != errno.EEXIST: + raise + + +def _main_(args): + + input_path = args.input + + + + + files = [] + # r=root, d=directories, f = files + for r, d, f in os.walk(input_path): + for file in f: + if '.jpg' in file: + files.append(os.path.join(r, file)) + + for f in files: + flir = flirimageextractor.FlirImageExtractor() + print(f) + try: + flir.process_image(f) + I = flirimageextractor.FlirImageExtractor.get_thermal_np(flir) + except: + I = plt.imread(f) + #flir.save_images() + #flir.plot() + + + + #img = img.astype(np.int8) + W = np.where(np.isnan(I)) + if np.shape(W)[1] > 0: + + #xmax = np.max(np.amax(W,axis=0)) + ymax = np.max(np.amin(W,axis=1)) + img = I[:ymax,:] + else: + img = I + + list_string = f.split('/') + list_string[-3]+= '_jpg' + f_aux = '/'.join(list_string) + + mkdir(f_aux) + plt.imsave(f_aux, img, cmap = 'gray') + + + +if __name__ == '__main__': + args = argparser.parse_args() + _main_(args) \ No newline at end of file diff --git a/Panel_Detector.ipynb b/Panel_Detector.ipynb new file mode 100644 index 0000000..cea1ce0 --- /dev/null +++ b/Panel_Detector.ipynb @@ -0,0 +1,1716 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Detector de Celulas" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Cargar el modelo ssd7 \n", + "(https://github.com/pierluigiferrari/ssd_keras#how-to-fine-tune-one-of-the-trained-models-on-your-own-dataset)\n", + "\n", + "Training del SSD7 (modelo reducido de SSD). 
Parámetros en config_7.json y descargar VGG_ILSVRC_16_layers_fc_reduced.h5\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Training on: \t{'panel': 1, 'cell': 2}\n", + "\n", + "\n", + "Loading pretrained weights.\n", + "\n" + ] + } + ], + "source": [ + "from keras.optimizers import Adam, SGD\n", + "from keras.callbacks import ModelCheckpoint, LearningRateScheduler, TerminateOnNaN, CSVLogger\n", + "from keras import backend as K\n", + "from keras.models import load_model\n", + "from math import ceil\n", + "import numpy as np\n", + "from matplotlib import pyplot as plt\n", + "import os\n", + "import json\n", + "import xml.etree.cElementTree as ET\n", + "\n", + "import sys\n", + "sys.path += [os.path.abspath('../ssd_keras-master')]\n", + "\n", + "from keras_loss_function.keras_ssd_loss import SSDLoss\n", + "from keras_layers.keras_layer_AnchorBoxes import AnchorBoxes\n", + "from keras_layers.keras_layer_DecodeDetections import DecodeDetections\n", + "from keras_layers.keras_layer_DecodeDetectionsFast import DecodeDetectionsFast\n", + "from keras_layers.keras_layer_L2Normalization import L2Normalization\n", + "from ssd_encoder_decoder.ssd_input_encoder import SSDInputEncoder\n", + "from ssd_encoder_decoder.ssd_output_decoder import decode_detections, decode_detections_fast\n", + "from data_generator.object_detection_2d_data_generator import DataGenerator\n", + "from data_generator.object_detection_2d_geometric_ops import Resize\n", + "from data_generator.object_detection_2d_photometric_ops import ConvertTo3Channels\n", + "from data_generator.data_augmentation_chain_original_ssd import SSDDataAugmentation\n", + "from data_generator.object_detection_2d_misc_utils import apply_inverse_transforms\n", + "from eval_utils.average_precision_evaluator import Evaluator\n", + "from data_generator.data_augmentation_chain_variable_input_size import DataAugmentationVariableInputSize\n", + "from data_generator.data_augmentation_chain_constant_input_size import DataAugmentationConstantInputSize\n", + "\n", + "\n", + "def makedirs(path):\n", + " try:\n", + " os.makedirs(path)\n", + " except OSError:\n", + " if not os.path.isdir(path):\n", + " raise\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "K.tensorflow_backend._get_available_gpus()\n", + "\n", + "\n", + "def lr_schedule(epoch):\n", + " if epoch < 80:\n", + " return 0.001\n", + " elif epoch < 100:\n", + " return 0.0001\n", + " else:\n", + " return 0.00001\n", + "\n", + "config_path = 'config_7_panel_cell.json'\n", + "\n", + "\n", + "with open(config_path) as config_buffer:\n", + " config = json.loads(config_buffer.read())\n", + "\n", + "###############################\n", + "# Parse the annotations\n", + "###############################\n", + "path_imgs_training = config['train']['train_image_folder']\n", + "path_anns_training = config['train']['train_annot_folder']\n", + "path_imgs_val = config['test']['test_image_folder']\n", + "path_anns_val = config['test']['test_annot_folder']\n", + "labels = config['model']['labels']\n", + "categories = {}\n", + "#categories = {\"Razor\": 1, \"Gun\": 2, \"Knife\": 3, \"Shuriken\": 4} #la categoría 0 es la background\n", + "for i in range(len(labels)): categories[labels[i]] = i+1\n", + "print('\\nTraining on: \\t' + str(categories) + '\\n')\n", + "\n", + "####################################\n", + "# Parameters\n", + "###################################\n", + " #%%\n", + 
"img_height = config['model']['input'] # Height of the model input images\n", + "img_width = config['model']['input'] # Width of the model input images\n", + "img_channels = 3 # Number of color channels of the model input images\n", + "mean_color = [123, 117, 104] # The per-channel mean of the images in the dataset. Do not change this value if you're using any of the pre-trained weights.\n", + "swap_channels = [2, 1, 0] # The color channel order in the original SSD is BGR, so we'll have the model reverse the color channel order of the input images.\n", + "n_classes = len(labels) # Number of positive classes, e.g. 20 for Pascal VOC, 80 for MS COCO\n", + "scales_pascal = [0.1, 0.2, 0.37, 0.54, 0.71, 0.88, 1.05] # The anchor box scaling factors used in the original SSD300 for the Pascal VOC datasets\n", + "#scales_coco = [0.07, 0.15, 0.33, 0.51, 0.69, 0.87, 1.05] # The anchor box scaling factors used in the original SSD300 for the MS COCO datasets\n", + "scales = scales_pascal\n", + "aspect_ratios = [[1.0, 2.0, 0.5],\n", + " [1.0, 2.0, 0.5, 3.0, 1.0/3.0],\n", + " [1.0, 2.0, 0.5, 3.0, 1.0/3.0],\n", + " [1.0, 2.0, 0.5, 3.0, 1.0/3.0],\n", + " [1.0, 2.0, 0.5],\n", + " [1.0, 2.0, 0.5]] # The anchor box aspect ratios used in the original SSD300; the order matters\n", + "two_boxes_for_ar1 = True\n", + "steps = [8, 16, 32, 64, 100, 300] # The space between two adjacent anchor box center points for each predictor layer.\n", + "offsets = [0.5, 0.5, 0.5, 0.5, 0.5, 0.5] # The offsets of the first anchor box center points from the top and left borders of the image as a fraction of the step size for each predictor layer.\n", + "clip_boxes = False # Whether or not to clip the anchor boxes to lie entirely within the image boundaries\n", + "variances = [0.1, 0.1, 0.2, 0.2] # The variances by which the encoded target coordinates are divided as in the original implementation\n", + "normalize_coords = True\n", + "\n", + "K.clear_session() # Clear previous models from memory.\n", + "\n", + "\n", + "model_path = config['train']['saved_weights_name']\n", + "# 3: Instantiate an optimizer and the SSD loss function and compile the model.\n", + "# If you want to follow the original Caffe implementation, use the preset SGD\n", + "# optimizer, otherwise I'd recommend the commented-out Adam optimizer.\n", + "\n", + "\n", + "if config['model']['backend'] == 'ssd7':\n", + " #weights_path = 'VGG_ILSVRC_16_layers_fc_reduced.h5'\n", + " scales = [0.08, 0.16, 0.32, 0.64, 0.96] # An explicit list of anchor box scaling factors. 
If this is passed, it will override `min_scale` and `max_scale`.\n", + " aspect_ratios = [0.5 ,1.0, 2.0] # The list of aspect ratios for the anchor boxes\n", + " two_boxes_for_ar1 = True # Whether or not you want to generate two anchor boxes for aspect ratio 1\n", + " steps = None # In case you'd like to set the step sizes for the anchor box grids manually; not recommended\n", + " offsets = None\n", + "\n", + "if os.path.exists(model_path):\n", + " print(\"\\nLoading pretrained weights.\\n\")\n", + " # We need to create an SSDLoss object in order to pass that to the model loader.\n", + " ssd_loss = SSDLoss(neg_pos_ratio=3, alpha=1.0)\n", + "\n", + " K.clear_session() # Clear previous models from memory.\n", + " model = load_model(model_path, custom_objects={'AnchorBoxes': AnchorBoxes,\n", + " 'L2Normalization': L2Normalization,\n", + " 'compute_loss': ssd_loss.compute_loss})\n", + "\n", + "\n", + "else:\n", + " ####################################\n", + " # Build the Keras model.\n", + " ###################################\n", + "\n", + " if config['model']['backend'] == 'ssd300':\n", + " #weights_path = 'VGG_VOC0712Plus_SSD_300x300_ft_iter_160000.h5'\n", + " from models.keras_ssd300 import ssd_300 as ssd\n", + "\n", + " model = ssd_300(image_size=(img_height, img_width, img_channels),\n", + " n_classes=n_classes,\n", + " mode='training',\n", + " l2_regularization=0.0005,\n", + " scales=scales,\n", + " aspect_ratios_per_layer=aspect_ratios,\n", + " two_boxes_for_ar1=two_boxes_for_ar1,\n", + " steps=steps,\n", + " offsets=offsets,\n", + " clip_boxes=clip_boxes,\n", + " variances=variances,\n", + " normalize_coords=normalize_coords,\n", + " subtract_mean=mean_color,\n", + " swap_channels=swap_channels)\n", + "\n", + "\n", + " elif config['model']['backend'] == 'ssd7':\n", + " #weights_path = 'VGG_ILSVRC_16_layers_fc_reduced.h5'\n", + " from models.keras_ssd7 import build_model as ssd\n", + " scales = [0.08, 0.16, 0.32, 0.64, 0.96] # An explicit list of anchor box scaling factors. If this is passed, it will override `min_scale` and `max_scale`.\n", + " aspect_ratios = [0.5 ,1.0, 2.0] # The list of aspect ratios for the anchor boxes\n", + " two_boxes_for_ar1 = True # Whether or not you want to generate two anchor boxes for aspect ratio 1\n", + " steps = None # In case you'd like to set the step sizes for the anchor box grids manually; not recommended\n", + " offsets = None\n", + " model = ssd(image_size=(img_height, img_width, img_channels),\n", + " n_classes=n_classes,\n", + " mode='training',\n", + " l2_regularization=0.0005,\n", + " scales=scales,\n", + " aspect_ratios_global=aspect_ratios,\n", + " aspect_ratios_per_layer=None,\n", + " two_boxes_for_ar1=two_boxes_for_ar1,\n", + " steps=steps,\n", + " offsets=offsets,\n", + " clip_boxes=clip_boxes,\n", + " variances=variances,\n", + " normalize_coords=normalize_coords,\n", + " subtract_mean=None,\n", + " divide_by_stddev=None)\n", + "\n", + " else :\n", + " print('Wrong Backend')\n", + "\n", + "\n", + "\n", + " print('OK create model')\n", + " #sgd = SGD(lr=config['train']['learning_rate'], momentum=0.9, decay=0.0, nesterov=False)\n", + "\n", + " # TODO: Set the path to the weights you want to load. 
only for ssd300 or ssd512\n", + "\n", + " weights_path = '../ssd_keras-master/VGG_ILSVRC_16_layers_fc_reduced.h5'\n", + " print(\"\\nLoading pretrained weights VGG.\\n\")\n", + " model.load_weights(weights_path, by_name=True)\n", + "\n", + " # 3: Instantiate an optimizer and the SSD loss function and compile the model.\n", + " # If you want to follow the original Caffe implementation, use the preset SGD\n", + " # optimizer, otherwise I'd recommend the commented-out Adam optimizer.\n", + "\n", + "\n", + " #adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)\n", + " #sgd = SGD(lr=0.001, momentum=0.9, decay=0.0, nesterov=False)\n", + " optimizer = Adam(lr=config['train']['learning_rate'], beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)\n", + " ssd_loss = SSDLoss(neg_pos_ratio=3, alpha=1.0)\n", + " model.compile(optimizer=optimizer, loss=ssd_loss.compute_loss)\n", + "\n", + " model.summary()\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Instanciar los generadores de datos y entrenamiento del modelo.\n", + "\n", + "*Cambio realizado para leer png y jpg. keras-ssd-master/data_generator/object_detection_2d_data_generator.py función parse_xml\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Processing image set 'train.txt': 100%|██████████| 1/1 [00:00<00:00, 3.02it/s]\n", + "Processing image set 'test.txt': 100%|██████████| 1/1 [00:00<00:00, 2.48it/s]\n", + "panel : 69\n", + "cell : 423\n", + "Number of images in the training dataset:\t 1\n", + "Number of images in the validation dataset:\t 1\n", + "Epoch 1/100\n", + "\n", + "Epoch 00001: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 200s 4s/step - loss: 13.2409 - val_loss: 9.9807\n", + "\n", + "Epoch 00001: val_loss improved from inf to 9.98075, saving model to experimento_ssd7_panel_cell.h5\n", + "Epoch 2/100\n", + "\n", + "Epoch 00002: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 238s 5s/step - loss: 9.8864 - val_loss: 11.1452\n", + "\n", + "Epoch 00002: val_loss did not improve from 9.98075\n", + "Epoch 3/100\n", + "\n", + "Epoch 00003: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 226s 5s/step - loss: 8.8060 - val_loss: 8.3006\n", + "\n", + "Epoch 00003: val_loss improved from 9.98075 to 8.30060, saving model to experimento_ssd7_panel_cell.h5\n", + "Epoch 4/100\n", + "\n", + "Epoch 00004: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 199s 4s/step - loss: 7.4999 - val_loss: 8.9384\n", + "\n", + "Epoch 00004: val_loss did not improve from 8.30060\n", + "Epoch 5/100\n", + "\n", + "Epoch 00005: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 187s 4s/step - loss: 7.4727 - val_loss: 7.9512\n", + "\n", + "Epoch 00005: val_loss improved from 8.30060 to 7.95121, saving model to experimento_ssd7_panel_cell.h5\n", + "Epoch 6/100\n", + "\n", + "Epoch 00006: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 213s 4s/step - loss: 6.8813 - val_loss: 11.2544\n", + "\n", + "Epoch 00006: val_loss did not improve from 7.95121\n", + "Epoch 7/100\n", + "\n", + "Epoch 00007: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] 
- 195s 4s/step - loss: 6.4775 - val_loss: 6.9093\n", + "\n", + "Epoch 00007: val_loss improved from 7.95121 to 6.90929, saving model to experimento_ssd7_panel_cell.h5\n", + "Epoch 8/100\n", + "\n", + "Epoch 00008: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 212s 4s/step - loss: 6.9758 - val_loss: 8.6997\n", + "\n", + "Epoch 00008: val_loss did not improve from 6.90929\n", + "Epoch 9/100\n", + "\n", + "Epoch 00009: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 199s 4s/step - loss: 6.1539 - val_loss: 10.9586\n", + "\n", + "Epoch 00009: val_loss did not improve from 6.90929\n", + "Epoch 10/100\n", + "\n", + "Epoch 00010: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 206s 4s/step - loss: 5.9307 - val_loss: 8.4361\n", + "\n", + "Epoch 00010: val_loss did not improve from 6.90929\n", + "Epoch 11/100\n", + "\n", + "Epoch 00011: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 197s 4s/step - loss: 5.3895 - val_loss: 5.9796\n", + "\n", + "Epoch 00011: val_loss improved from 6.90929 to 5.97960, saving model to experimento_ssd7_panel_cell.h5\n", + "Epoch 12/100\n", + "\n", + "Epoch 00012: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 184s 4s/step - loss: 5.0889 - val_loss: 5.9283\n", + "\n", + "Epoch 00012: val_loss improved from 5.97960 to 5.92832, saving model to experimento_ssd7_panel_cell.h5\n", + "Epoch 13/100\n", + "\n", + "Epoch 00013: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 193s 4s/step - loss: 5.7916 - val_loss: 6.7706\n", + "\n", + "Epoch 00013: val_loss did not improve from 5.92832\n", + "Epoch 14/100\n", + "\n", + "Epoch 00014: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 222s 4s/step - loss: 5.3010 - val_loss: 7.8910\n", + "\n", + "Epoch 00014: val_loss did not improve from 5.92832\n", + "Epoch 15/100\n", + "\n", + "Epoch 00015: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 179s 4s/step - loss: 4.9873 - val_loss: 6.0389\n", + "\n", + "Epoch 00015: val_loss did not improve from 5.92832\n", + "Epoch 16/100\n", + "\n", + "Epoch 00016: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 182s 4s/step - loss: 5.4664 - val_loss: 6.4125\n", + "\n", + "Epoch 00016: val_loss did not improve from 5.92832\n", + "Epoch 17/100\n", + "\n", + "Epoch 00017: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 166s 3s/step - loss: 6.0094 - val_loss: 9.2918\n", + "\n", + "Epoch 00017: val_loss did not improve from 5.92832\n", + "Epoch 18/100\n", + "\n", + "Epoch 00018: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 181s 4s/step - loss: 5.1737 - val_loss: 7.6806\n", + "\n", + "Epoch 00018: val_loss did not improve from 5.92832\n", + "Epoch 19/100\n", + "\n", + "Epoch 00019: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 159s 3s/step - loss: 5.2708 - val_loss: 7.1096\n", + "\n", + "Epoch 00019: val_loss did not improve from 5.92832\n", + "Epoch 20/100\n", + "\n", + "Epoch 00020: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 
[==============================] - 173s 3s/step - loss: 5.4765 - val_loss: 5.4921\n", + "\n", + "Epoch 00020: val_loss improved from 5.92832 to 5.49211, saving model to experimento_ssd7_panel_cell.h5\n", + "Epoch 21/100\n", + "\n", + "Epoch 00021: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 170s 3s/step - loss: 4.6517 - val_loss: 6.6033\n", + "\n", + "Epoch 00021: val_loss did not improve from 5.49211\n", + "Epoch 22/100\n", + "\n", + "Epoch 00022: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 191s 4s/step - loss: 5.1432 - val_loss: 5.6549\n", + "\n", + "Epoch 00022: val_loss did not improve from 5.49211\n", + "Epoch 23/100\n", + "\n", + "Epoch 00023: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 159s 3s/step - loss: 5.4830 - val_loss: 5.8758\n", + "\n", + "Epoch 00023: val_loss did not improve from 5.49211\n", + "Epoch 24/100\n", + "\n", + "Epoch 00024: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 150s 3s/step - loss: 5.3366 - val_loss: 5.3871\n", + "\n", + "Epoch 00024: val_loss improved from 5.49211 to 5.38706, saving model to experimento_ssd7_panel_cell.h5\n", + "Epoch 25/100\n", + "\n", + "Epoch 00025: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 138s 3s/step - loss: 5.7189 - val_loss: 8.0760\n", + "\n", + "Epoch 00025: val_loss did not improve from 5.38706\n", + "Epoch 26/100\n", + "\n", + "Epoch 00026: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 144s 3s/step - loss: 6.0929 - val_loss: 12.6163\n", + "\n", + "Epoch 00026: val_loss did not improve from 5.38706\n", + "Epoch 27/100\n", + "\n", + "Epoch 00027: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 147s 3s/step - loss: 5.2239 - val_loss: 9.8536\n", + "\n", + "Epoch 00027: val_loss did not improve from 5.38706\n", + "Epoch 28/100\n", + "\n", + "Epoch 00028: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 158s 3s/step - loss: 5.4414 - val_loss: 6.4950\n", + "\n", + "Epoch 00028: val_loss did not improve from 5.38706\n", + "Epoch 29/100\n", + "\n", + "Epoch 00029: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 157s 3s/step - loss: 5.4436 - val_loss: 9.0002\n", + "\n", + "Epoch 00029: val_loss did not improve from 5.38706\n", + "Epoch 30/100\n", + "\n", + "Epoch 00030: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 162s 3s/step - loss: 4.9780 - val_loss: 4.9993\n", + "\n", + "Epoch 00030: val_loss improved from 5.38706 to 4.99925, saving model to experimento_ssd7_panel_cell.h5\n", + "Epoch 31/100\n", + "\n", + "Epoch 00031: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 140s 3s/step - loss: 4.9645 - val_loss: 5.6612\n", + "\n", + "Epoch 00031: val_loss did not improve from 4.99925\n", + "Epoch 32/100\n", + "\n", + "Epoch 00032: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 141s 3s/step - loss: 4.5982 - val_loss: 5.2083\n", + "\n", + "Epoch 00032: val_loss did not improve from 4.99925\n", + "Epoch 33/100\n", + "\n", + "Epoch 00033: LearningRateScheduler setting learning rate 
to 0.001.\n", + "50/50 [==============================] - 143s 3s/step - loss: 4.3101 - val_loss: 6.4808\n", + "\n", + "Epoch 00033: val_loss did not improve from 4.99925\n", + "Epoch 34/100\n", + "\n", + "Epoch 00034: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 145s 3s/step - loss: 4.4252 - val_loss: 10.9472\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Epoch 00034: val_loss did not improve from 4.99925\n", + "Epoch 35/100\n", + "\n", + "Epoch 00035: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 153s 3s/step - loss: 4.4998 - val_loss: 7.1254\n", + "\n", + "Epoch 00035: val_loss did not improve from 4.99925\n", + "Epoch 36/100\n", + "\n", + "Epoch 00036: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 153s 3s/step - loss: 4.8952 - val_loss: 7.0446\n", + "\n", + "Epoch 00036: val_loss did not improve from 4.99925\n", + "Epoch 37/100\n", + "\n", + "Epoch 00037: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 154s 3s/step - loss: 4.9868 - val_loss: 9.3251\n", + "\n", + "Epoch 00037: val_loss did not improve from 4.99925\n", + "Epoch 38/100\n", + "\n", + "Epoch 00038: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 148s 3s/step - loss: 4.8918 - val_loss: 5.1689\n", + "\n", + "Epoch 00038: val_loss did not improve from 4.99925\n", + "Epoch 39/100\n", + "\n", + "Epoch 00039: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 143s 3s/step - loss: 4.5572 - val_loss: 4.9839\n", + "\n", + "Epoch 00039: val_loss improved from 4.99925 to 4.98394, saving model to experimento_ssd7_panel_cell.h5\n", + "Epoch 40/100\n", + "\n", + "Epoch 00040: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 150s 3s/step - loss: 4.4722 - val_loss: 5.7133\n", + "\n", + "Epoch 00040: val_loss did not improve from 4.98394\n", + "Epoch 41/100\n", + "\n", + "Epoch 00041: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 152s 3s/step - loss: 4.9414 - val_loss: 5.5843\n", + "\n", + "Epoch 00041: val_loss did not improve from 4.98394\n", + "Epoch 42/100\n", + "\n", + "Epoch 00042: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 148s 3s/step - loss: 4.5857 - val_loss: 5.1884\n", + "\n", + "Epoch 00042: val_loss did not improve from 4.98394\n", + "Epoch 43/100\n", + "\n", + "Epoch 00043: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 149s 3s/step - loss: 4.7094 - val_loss: 6.7545\n", + "\n", + "Epoch 00043: val_loss did not improve from 4.98394\n", + "Epoch 44/100\n", + "\n", + "Epoch 00044: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 151s 3s/step - loss: 5.0428 - val_loss: 5.2691\n", + "\n", + "Epoch 00044: val_loss did not improve from 4.98394\n", + "Epoch 45/100\n", + "\n", + "Epoch 00045: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 146s 3s/step - loss: 4.9842 - val_loss: 6.5112\n", + "\n", + "Epoch 00045: val_loss did not improve from 4.98394\n", + "Epoch 46/100\n", + "\n", + "Epoch 00046: LearningRateScheduler setting learning rate to 
0.001.\n", + "50/50 [==============================] - 147s 3s/step - loss: 4.9108 - val_loss: 6.0670\n", + "\n", + "Epoch 00046: val_loss did not improve from 4.98394\n", + "Epoch 47/100\n", + "\n", + "Epoch 00047: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 155s 3s/step - loss: 4.6837 - val_loss: 5.8351\n", + "\n", + "Epoch 00047: val_loss did not improve from 4.98394\n", + "Epoch 48/100\n", + "\n", + "Epoch 00048: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 149s 3s/step - loss: 5.1042 - val_loss: 5.1778\n", + "\n", + "Epoch 00048: val_loss did not improve from 4.98394\n", + "Epoch 49/100\n", + "\n", + "Epoch 00049: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 144s 3s/step - loss: 4.1312 - val_loss: 5.9606\n", + "\n", + "Epoch 00049: val_loss did not improve from 4.98394\n", + "Epoch 50/100\n", + "\n", + "Epoch 00050: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 122s 2s/step - loss: 4.5373 - val_loss: 5.4351\n", + "\n", + "Epoch 00050: val_loss did not improve from 4.98394\n", + "Epoch 51/100\n", + "\n", + "Epoch 00051: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 135s 3s/step - loss: 4.8955 - val_loss: 6.0315\n", + "\n", + "Epoch 00051: val_loss did not improve from 4.98394\n", + "Epoch 52/100\n", + "\n", + "Epoch 00052: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 150s 3s/step - loss: 4.9445 - val_loss: 5.7199\n", + "\n", + "Epoch 00052: val_loss did not improve from 4.98394\n", + "Epoch 53/100\n", + "\n", + "Epoch 00053: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 139s 3s/step - loss: 3.9748 - val_loss: 5.5974\n", + "\n", + "Epoch 00053: val_loss did not improve from 4.98394\n", + "Epoch 54/100\n", + "\n", + "Epoch 00054: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 153s 3s/step - loss: 4.8783 - val_loss: 8.6056\n", + "\n", + "Epoch 00054: val_loss did not improve from 4.98394\n", + "Epoch 55/100\n", + "\n", + "Epoch 00055: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 141s 3s/step - loss: 4.1649 - val_loss: 6.0042\n", + "\n", + "Epoch 00055: val_loss did not improve from 4.98394\n", + "Epoch 56/100\n", + "\n", + "Epoch 00056: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 149s 3s/step - loss: 4.8997 - val_loss: 9.1298\n", + "\n", + "Epoch 00056: val_loss did not improve from 4.98394\n", + "Epoch 57/100\n", + "\n", + "Epoch 00057: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 151s 3s/step - loss: 4.4433 - val_loss: 7.1151\n", + "\n", + "Epoch 00057: val_loss did not improve from 4.98394\n", + "Epoch 58/100\n", + "\n", + "Epoch 00058: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 147s 3s/step - loss: 4.5827 - val_loss: 5.4356\n", + "\n", + "Epoch 00058: val_loss did not improve from 4.98394\n", + "Epoch 59/100\n", + "\n", + "Epoch 00059: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 137s 3s/step - loss: 3.9437 - val_loss: 4.7926\n", + "\n", + "Epoch 00059: 
val_loss improved from 4.98394 to 4.79262, saving model to experimento_ssd7_panel_cell.h5\n", + "Epoch 60/100\n", + "\n", + "Epoch 00060: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 125s 3s/step - loss: 4.0939 - val_loss: 5.7098\n", + "\n", + "Epoch 00060: val_loss did not improve from 4.79262\n", + "Epoch 61/100\n", + "\n", + "Epoch 00061: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 161s 3s/step - loss: 5.1152 - val_loss: 5.2079\n", + "\n", + "Epoch 00061: val_loss did not improve from 4.79262\n", + "Epoch 62/100\n", + "\n", + "Epoch 00062: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 144s 3s/step - loss: 4.2958 - val_loss: 4.9239\n", + "\n", + "Epoch 00062: val_loss did not improve from 4.79262\n", + "Epoch 63/100\n", + "\n", + "Epoch 00063: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 141s 3s/step - loss: 3.8241 - val_loss: 4.5443\n", + "\n", + "Epoch 00063: val_loss improved from 4.79262 to 4.54430, saving model to experimento_ssd7_panel_cell.h5\n", + "Epoch 64/100\n", + "\n", + "Epoch 00064: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 134s 3s/step - loss: 4.7252 - val_loss: 5.9445\n", + "\n", + "Epoch 00064: val_loss did not improve from 4.54430\n", + "Epoch 65/100\n", + "\n", + "Epoch 00065: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 154s 3s/step - loss: 4.4455 - val_loss: 4.8326\n", + "\n", + "Epoch 00065: val_loss did not improve from 4.54430\n", + "Epoch 66/100\n", + "\n", + "Epoch 00066: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 145s 3s/step - loss: 4.4054 - val_loss: 5.6441\n", + "\n", + "Epoch 00066: val_loss did not improve from 4.54430\n", + "Epoch 67/100\n", + "\n", + "Epoch 00067: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 124s 2s/step - loss: 4.4165 - val_loss: 6.8159\n", + "\n", + "Epoch 00067: val_loss did not improve from 4.54430\n", + "Epoch 68/100\n", + "\n", + "Epoch 00068: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 162s 3s/step - loss: 5.0418 - val_loss: 4.8508\n", + "\n", + "Epoch 00068: val_loss did not improve from 4.54430\n", + "Epoch 69/100\n", + "\n", + "Epoch 00069: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 140s 3s/step - loss: 4.1512 - val_loss: 5.4053\n", + "\n", + "Epoch 00069: val_loss did not improve from 4.54430\n", + "Epoch 70/100\n", + "\n", + "Epoch 00070: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 148s 3s/step - loss: 4.6197 - val_loss: 5.2824\n", + "\n", + "Epoch 00070: val_loss did not improve from 4.54430\n", + "Epoch 71/100\n", + "\n", + "Epoch 00071: LearningRateScheduler setting learning rate to 0.001.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "50/50 [==============================] - 152s 3s/step - loss: 4.2807 - val_loss: 5.5992\n", + "\n", + "Epoch 00071: val_loss did not improve from 4.54430\n", + "Epoch 72/100\n", + "\n", + "Epoch 00072: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 143s 3s/step - loss: 4.5368 - 
val_loss: 6.5207\n", + "\n", + "Epoch 00072: val_loss did not improve from 4.54430\n", + "Epoch 73/100\n", + "\n", + "Epoch 00073: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 141s 3s/step - loss: 4.0598 - val_loss: 5.2421\n", + "\n", + "Epoch 00073: val_loss did not improve from 4.54430\n", + "Epoch 74/100\n", + "\n", + "Epoch 00074: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 150s 3s/step - loss: 4.4861 - val_loss: 5.4182\n", + "\n", + "Epoch 00074: val_loss did not improve from 4.54430\n", + "Epoch 75/100\n", + "\n", + "Epoch 00075: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 144s 3s/step - loss: 4.5263 - val_loss: 4.3774\n", + "\n", + "Epoch 00075: val_loss improved from 4.54430 to 4.37742, saving model to experimento_ssd7_panel_cell.h5\n", + "Epoch 76/100\n", + "\n", + "Epoch 00076: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 148s 3s/step - loss: 3.8465 - val_loss: 4.5809\n", + "\n", + "Epoch 00076: val_loss did not improve from 4.37742\n", + "Epoch 77/100\n", + "\n", + "Epoch 00077: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 152s 3s/step - loss: 4.0495 - val_loss: 4.9745\n", + "\n", + "Epoch 00077: val_loss did not improve from 4.37742\n", + "Epoch 78/100\n", + "\n", + "Epoch 00078: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 152s 3s/step - loss: 4.6009 - val_loss: 13.4989\n", + "\n", + "Epoch 00078: val_loss did not improve from 4.37742\n", + "Epoch 79/100\n", + "\n", + "Epoch 00079: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 142s 3s/step - loss: 4.6687 - val_loss: 6.4490\n", + "\n", + "Epoch 00079: val_loss did not improve from 4.37742\n", + "Epoch 80/100\n", + "\n", + "Epoch 00080: LearningRateScheduler setting learning rate to 0.001.\n", + "50/50 [==============================] - 147s 3s/step - loss: 4.5297 - val_loss: 8.0478\n", + "\n", + "Epoch 00080: val_loss did not improve from 4.37742\n", + "Epoch 81/100\n", + "\n", + "Epoch 00081: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 141s 3s/step - loss: 4.2662 - val_loss: 5.7929\n", + "\n", + "Epoch 00081: val_loss did not improve from 4.37742\n", + "Epoch 82/100\n", + "\n", + "Epoch 00082: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 149s 3s/step - loss: 4.1048 - val_loss: 4.6117\n", + "\n", + "Epoch 00082: val_loss did not improve from 4.37742\n", + "Epoch 83/100\n", + "\n", + "Epoch 00083: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 156s 3s/step - loss: 3.9905 - val_loss: 4.5542\n", + "\n", + "Epoch 00083: val_loss did not improve from 4.37742\n", + "Epoch 84/100\n", + "\n", + "Epoch 00084: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 155s 3s/step - loss: 4.3129 - val_loss: 4.4676\n", + "\n", + "Epoch 00084: val_loss did not improve from 4.37742\n", + "Epoch 85/100\n", + "\n", + "Epoch 00085: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 156s 3s/step - loss: 3.7951 - val_loss: 4.4689\n", + "\n", + "Epoch 00085: val_loss did not improve from 
4.37742\n", + "Epoch 86/100\n", + "\n", + "Epoch 00086: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 155s 3s/step - loss: 4.3618 - val_loss: 4.4048\n", + "\n", + "Epoch 00086: val_loss did not improve from 4.37742\n", + "Epoch 87/100\n", + "\n", + "Epoch 00087: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 156s 3s/step - loss: 4.3538 - val_loss: 4.6832\n", + "\n", + "Epoch 00087: val_loss did not improve from 4.37742\n", + "Epoch 88/100\n", + "\n", + "Epoch 00088: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 152s 3s/step - loss: 4.2076 - val_loss: 4.4796\n", + "\n", + "Epoch 00088: val_loss did not improve from 4.37742\n", + "Epoch 89/100\n", + "\n", + "Epoch 00089: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 146s 3s/step - loss: 4.1322 - val_loss: 4.5462\n", + "\n", + "Epoch 00089: val_loss did not improve from 4.37742\n", + "Epoch 90/100\n", + "\n", + "Epoch 00090: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 157s 3s/step - loss: 4.4995 - val_loss: 4.5660\n", + "\n", + "Epoch 00090: val_loss did not improve from 4.37742\n", + "Epoch 91/100\n", + "\n", + "Epoch 00091: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 158s 3s/step - loss: 4.2653 - val_loss: 4.5265\n", + "\n", + "Epoch 00091: val_loss did not improve from 4.37742\n", + "Epoch 92/100\n", + "\n", + "Epoch 00092: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 153s 3s/step - loss: 4.3702 - val_loss: 4.5276\n", + "\n", + "Epoch 00092: val_loss did not improve from 4.37742\n", + "Epoch 93/100\n", + "\n", + "Epoch 00093: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 153s 3s/step - loss: 3.7340 - val_loss: 4.5439\n", + "\n", + "Epoch 00093: val_loss did not improve from 4.37742\n", + "Epoch 94/100\n", + "\n", + "Epoch 00094: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 151s 3s/step - loss: 4.0253 - val_loss: 4.3250\n", + "\n", + "Epoch 00094: val_loss improved from 4.37742 to 4.32498, saving model to experimento_ssd7_panel_cell.h5\n", + "Epoch 95/100\n", + "\n", + "Epoch 00095: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 143s 3s/step - loss: 4.0254 - val_loss: 4.6277\n", + "\n", + "Epoch 00095: val_loss did not improve from 4.32498\n", + "Epoch 96/100\n", + "\n", + "Epoch 00096: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 148s 3s/step - loss: 3.9857 - val_loss: 4.2953\n", + "\n", + "Epoch 00096: val_loss improved from 4.32498 to 4.29533, saving model to experimento_ssd7_panel_cell.h5\n", + "Epoch 97/100\n", + "\n", + "Epoch 00097: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 157s 3s/step - loss: 3.6750 - val_loss: 4.5637\n", + "\n", + "Epoch 00097: val_loss did not improve from 4.29533\n", + "Epoch 98/100\n", + "\n", + "Epoch 00098: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 154s 3s/step - loss: 3.7435 - val_loss: 4.3923\n", + "\n", + "Epoch 00098: val_loss did not improve from 4.29533\n", + 
"Epoch 99/100\n", + "\n", + "Epoch 00099: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 162s 3s/step - loss: 4.0930 - val_loss: 4.4010\n", + "\n", + "Epoch 00099: val_loss did not improve from 4.29533\n", + "Epoch 100/100\n", + "\n", + "Epoch 00100: LearningRateScheduler setting learning rate to 0.0001.\n", + "50/50 [==============================] - 134s 3s/step - loss: 3.8983 - val_loss: 4.4451\n", + "\n", + "Epoch 00100: val_loss did not improve from 4.29533\n" + ] + } + ], + "source": [ + "#ENTRENAMIENTO DE MODELO\n", + "#####################################################################\n", + "# Instantiate two `DataGenerator` objects: One for training, one for validation.\n", + "######################################################################\n", + "# Optional: If you have enough memory, consider loading the images into memory for the reasons explained above.\n", + "\n", + "train_dataset = DataGenerator(load_images_into_memory=False, hdf5_dataset_path=None)\n", + "val_dataset = DataGenerator(load_images_into_memory=False, hdf5_dataset_path=None)\n", + "\n", + "# 2: Parse the image and label lists for the training and validation datasets. This can take a while.\n", + "\n", + "\n", + "\n", + "# The XML parser needs to now what object class names to look for and in which order to map them to integers.\n", + "classes = ['background' ] + labels\n", + "\n", + "train_dataset.parse_xml(images_dirs= [config['train']['train_image_folder']],\n", + " image_set_filenames=[config['train']['train_image_set_filename']],\n", + " annotations_dirs=[config['train']['train_annot_folder']],\n", + " classes=classes,\n", + " include_classes='all',\n", + " #classes = ['background', 'panel', 'cell'], \n", + " #include_classes=classes,\n", + " exclude_truncated=False,\n", + " exclude_difficult=False,\n", + " ret=False)\n", + "\n", + "val_dataset.parse_xml(images_dirs= [config['test']['test_image_folder']],\n", + " image_set_filenames=[config['test']['test_image_set_filename']],\n", + " annotations_dirs=[config['test']['test_annot_folder']],\n", + " classes=classes,\n", + " include_classes='all',\n", + " #classes = ['background', 'panel', 'cell'], \n", + " #include_classes=classes,\n", + " exclude_truncated=False,\n", + " exclude_difficult=False,\n", + " ret=False)\n", + "\n", + "#########################\n", + "# 3: Set the batch size.\n", + "#########################\n", + "batch_size = config['train']['batch_size'] # Change the batch size if you like, or if you run into GPU memory issues.\n", + "\n", + "##########################\n", + "# 4: Set the image transformations for pre-processing and data augmentation options.\n", + "##########################\n", + "# For the training generator:\n", + "\n", + "\n", + "# For the validation generator:\n", + "convert_to_3_channels = ConvertTo3Channels()\n", + "resize = Resize(height=img_height, width=img_width)\n", + "\n", + "######################################3\n", + "# 5: Instantiate an encoder that can encode ground truth labels into the format needed by the SSD loss function.\n", + "#########################################\n", + "# The encoder constructor needs the spatial dimensions of the model's predictor layers to create the anchor boxes.\n", + "if config['model']['backend'] == 'ssd300':\n", + " predictor_sizes = [model.get_layer('conv4_3_norm_mbox_conf').output_shape[1:3],\n", + " model.get_layer('fc7_mbox_conf').output_shape[1:3],\n", + " 
model.get_layer('conv6_2_mbox_conf').output_shape[1:3],\n", + " model.get_layer('conv7_2_mbox_conf').output_shape[1:3],\n", + " model.get_layer('conv8_2_mbox_conf').output_shape[1:3],\n", + " model.get_layer('conv9_2_mbox_conf').output_shape[1:3]]\n", + " ssd_input_encoder = SSDInputEncoder(img_height=img_height,\n", + " img_width=img_width,\n", + " n_classes=n_classes,\n", + " predictor_sizes=predictor_sizes,\n", + " scales=scales,\n", + " aspect_ratios_per_layer=aspect_ratios,\n", + " two_boxes_for_ar1=two_boxes_for_ar1,\n", + " steps=steps,\n", + " offsets=offsets,\n", + " clip_boxes=clip_boxes,\n", + " variances=variances,\n", + " matching_type='multi',\n", + " pos_iou_threshold=0.5,\n", + " neg_iou_limit=0.5,\n", + " normalize_coords=normalize_coords)\n", + "\n", + "elif config['model']['backend'] == 'ssd7':\n", + " predictor_sizes = [model.get_layer('classes4').output_shape[1:3],\n", + " model.get_layer('classes5').output_shape[1:3],\n", + " model.get_layer('classes6').output_shape[1:3],\n", + " model.get_layer('classes7').output_shape[1:3]]\n", + " ssd_input_encoder = SSDInputEncoder(img_height=img_height,\n", + " img_width=img_width,\n", + " n_classes=n_classes,\n", + " predictor_sizes=predictor_sizes,\n", + " scales=scales,\n", + " aspect_ratios_global=aspect_ratios,\n", + " two_boxes_for_ar1=two_boxes_for_ar1,\n", + " steps=steps,\n", + " offsets=offsets,\n", + " clip_boxes=clip_boxes,\n", + " variances=variances,\n", + " matching_type='multi',\n", + " pos_iou_threshold=0.5,\n", + " neg_iou_limit=0.3,\n", + " normalize_coords=normalize_coords)\n", + "\n", + "\n", + "\n", + " \n", + "data_augmentation_chain = DataAugmentationVariableInputSize(resize_height = img_height,\n", + " resize_width = img_width,\n", + " random_brightness=(-48, 48, 0.5),\n", + " random_contrast=(0.5, 1.8, 0.5),\n", + " random_saturation=(0.5, 1.8, 0.5),\n", + " random_hue=(18, 0.5),\n", + " random_flip=0.5,\n", + " n_trials_max=3,\n", + " clip_boxes=True,\n", + " overlap_criterion='area',\n", + " bounds_box_filter=(0.3, 1.0),\n", + " bounds_validator=(0.5, 1.0),\n", + " n_boxes_min=1,\n", + " background=(0,0,0))\n", + "#######################\n", + "# 6: Create the generator handles that will be passed to Keras' `fit_generator()` function.\n", + "#######################\n", + "\n", + "train_generator = train_dataset.generate(batch_size=batch_size,\n", + " shuffle=True,\n", + " transformations= [data_augmentation_chain],\n", + " label_encoder=ssd_input_encoder,\n", + " returns={'processed_images',\n", + " 'encoded_labels'},\n", + " keep_images_without_gt=False)\n", + "\n", + "val_generator = val_dataset.generate(batch_size=batch_size,\n", + " shuffle=False,\n", + " transformations=[convert_to_3_channels,\n", + " resize],\n", + " label_encoder=ssd_input_encoder,\n", + " returns={'processed_images',\n", + " 'encoded_labels'},\n", + " keep_images_without_gt=False)\n", + "\n", + "# Summary instance training\n", + "category_train_list = []\n", + "for image_label in train_dataset.labels:\n", + " category_train_list += [i[0] for i in train_dataset.labels[0]]\n", + "summary_category_training = {train_dataset.classes[i]: category_train_list.count(i) for i in list(set(category_train_list))}\n", + "for i in summary_category_training.keys():\n", + " print(i, ': {:.0f}'.format(summary_category_training[i]))\n", + "\n", + "\n", + "\n", + "# Get the number of samples in the training and validations datasets.\n", + "train_dataset_size = train_dataset.get_dataset_size()\n", + "val_dataset_size = 
val_dataset.get_dataset_size()\n", + "\n", + "print(\"Number of images in the training dataset:\\t{:>6}\".format(train_dataset_size))\n", + "print(\"Number of images in the validation dataset:\\t{:>6}\".format(val_dataset_size))\n", + "\n", + "\n", + "\n", + "##########################\n", + "# Define model callbacks.\n", + "#########################\n", + "\n", + "# TODO: Set the filepath under which you want to save the model.\n", + "model_checkpoint = ModelCheckpoint(filepath= config['train']['saved_weights_name'],\n", + " monitor='val_loss',\n", + " verbose=1,\n", + " save_best_only=True,\n", + " save_weights_only=False,\n", + " mode='auto',\n", + " period=1)\n", + "#model_checkpoint.best =\n", + "\n", + "csv_logger = CSVLogger(filename='log.csv',\n", + " separator=',',\n", + " append=True)\n", + "\n", + "learning_rate_scheduler = LearningRateScheduler(schedule=lr_schedule,\n", + " verbose=1)\n", + "\n", + "terminate_on_nan = TerminateOnNaN()\n", + "\n", + "callbacks = [model_checkpoint,\n", + " csv_logger,\n", + " learning_rate_scheduler,\n", + " terminate_on_nan]\n", + "\n", + "\n", + "\n", + "batch_images, batch_labels = next(train_generator)\n", + "\n", + "\n", + "initial_epoch = 0\n", + "final_epoch = 100 #config['train']['nb_epochs']\n", + "steps_per_epoch = 50\n", + "\n", + "history = model.fit_generator(generator=train_generator,\n", + " steps_per_epoch=steps_per_epoch,\n", + " epochs=final_epoch,\n", + " callbacks=callbacks,\n", + " validation_data=val_generator,\n", + " validation_steps=ceil(val_dataset_size/batch_size),\n", + " initial_epoch=initial_epoch,\n", + " verbose = 1 if config['train']['debug'] else 2)\n", + "\n", + "history_path = config['train']['saved_weights_name'].split('.')[0] + '_history'\n", + "\n", + "np.save(history_path, history.history)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "dict_keys(['val_loss', 'loss', 'lr'])\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYIAAAEWCAYAAABrDZDcAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvXd8XNWd9/8+kka9Ws2Suw1uGGODqQkBQi9LCAQIISHtF7Kb3SzP7iakbbJPNptNnk3d9NBCEhISAiEkAUI31WAb44Z7kS25SLKsLo005fz+OPfM3Llzp0kzI0tz3q+XX6Np994ZS+dzvl1IKTEYDAZD7pI30RdgMBgMhonFCIHBYDDkOEYIDAaDIccxQmAwGAw5jhECg8FgyHGMEBgMBkOOY4TAYIiDEOJ+IcR/JfnaFiHEJeM9jsGQbYwQGAwGQ45jhMBgMBhyHCMEhkmP5ZL5rBBisxBiUAhxrxCiUQjxpBCiXwjxrBCixvb6a4UQbwsheoQQq4UQS2zPrRRCbLDe93ug2HGua4QQG633viaEWD7Ga/6EEGKPEOK4EOLPQohm63EhhPieEKJDCNFrfaZl1nNXCSG2Wdd2SAjxmTF9YQaDAyMEhqnCDcClwELg74AngS8Cdajf838GEEIsBB4E/g9QDzwB/EUIUSiEKAT+BPwamAb8wTou1ntPB+4DPgnUAj8H/iyEKErlQoUQ7wa+AdwENAEHgN9ZT18GvMv6HNXAzUCX9dy9wCellBXAMuD5VM5rMMTCCIFhqvBDKWW7lPIQ8DLwhpTyLSnlCPAosNJ63c3A41LKZ6SUPuDbQAlwHnAO4AG+L6X0SSkfBtbZzvEJ4OdSyjeklAEp5S+BEet9qXArcJ+UcoN1fV8AzhVCzAV8QAWwGBBSyu1SyiPW+3zAUiFEpZSyW0q5IcXzGgyuGCEwTBXabT8Pu9wvt35uRu3AAZBSBoFWYIb13CEZ2YnxgO3nOcC/WW6hHiFEDzDLel8qOK9hALXrnyGlfB74EfBjoF0IcZcQotJ66Q3AVcABIcSLQohzUzyvweCKEQJDrnEYtaADyiePWswPAUeAGdZjmtm2n1uBr0spq23/SqWUD47zGspQrqZDAFLKH0gpzwBOQbmIPms9vk5K+R6gAeXCeijF8xoMrhghMOQaDwFXCyEuFkJ4gH9DuXdeA9YAfuCfhRAFQojrgbNs770b+HshxNlWULdMCHG1EKIixWv4LfBRIcQKK77w3yhXVosQ4kzr+B5gEPACASuGcasQospyafUBgXF8DwZDCCMEhpxCSrkT+CDwQ+AYKrD8d1LKUSnlKHA98BGgGxVP+KPtvetRcYIfWc/vsV6b6jU8B3wZeARlhSwA3m89XYkSnG6U+6gLFccA+BDQIoToA/7e+hwGw7gRZjCNwWAw5DbGIjAYDIYcJ2NCIIS4zyqK2ery3GeEEFIIUZep8xsMBoMhOTJpEdwPXOF8UAgxC1X4czCD5zYYDAZDkmRMCKSULwHHXZ76HnAnYIITBoPBcAJQkM2TCSGuRRXsbIpM1XZ97e3A7QBlZWVnLF68OAtXaDAYDFOHN99885iUsj7R67ImBEKIUuBLqF4qCZFS3gXcBbBq1Sq5fv36DF6dwWAwTD2EEAcSvyq7WUMLgHnAJiFECzAT2CCEmJ7FazAYDAaDg6xZBFLKLajSeEC1DgZWSSmPZesaDAaDwRBNJtNHH0SV7C8SQrQJIT6eqXMZDAaDYexkzCKQUt6S4Pm54zm+z+ejra0Nr9c7nsOc8BQXFzNz5kw8Hs9EX4rBYJiiZDVrKJ20tbVRUVHB3LlzSZSBNFmRUtLV1UVbWxvz5s2b6MsxGAxTlEnbYsLr9VJbWztlRQBACEFtbe2Ut3oMBsPEMmmFAJjSIqDJhc9oMBgmlkktBAaDYYrR8gp07pzoq8g5jBCMkZ6eHn7yk5+k/L6rrrqKnp6eDFyRwTAF+Msd8PJ3Jvoqcg4jBGMklhAEAvGHRj3xxBNUV1dn6rIMhsmNzwu+4Ym+ipxj0mYNTTSf//zn2bt3LytWrMDj8VBeXk5TUxMbN25k27ZtXHfddbS2tuL1ernjjju4/fbbAZg7dy7r169nYGCAK6+8kne+85289tprzJgxg8cee4ySkpIJ/mQGwwQS9Kt/hqwyJYTgq395m22H+9J6zKXNlfzH350S8/lvfvObbN26lY0bN7J69Wquvvpqtm7dGkrzvO+++5g2bRrDw8OceeaZ3HDDDdTW1kYcY/fu3Tz44IPcfffd3HTTTTzyyCN88INm+qAhhwn6IDA60VeRc0wJIYhFUEqkhPy8zGfenHXWWRG5/j/4wQ949NFHAWhtbWX37t1RQjBv3jxWrFgBwBlnnEFLS0vGr9NgOKEJ+iHgm+iryDmmhBDE2rkf6h6md3iUpc1VGb+GsrKy0M+rV6/m2WefZc2aNZSWlnLhhRe61gIUFRWFfs7Pz2d42PhGDTlOwAjBRDClg8V5eRDM0PibiooK+vv7XZ/r7e2lpqaG0tJSduzYweuvv56ZizAYphrGNTQhTAmLIBZ5QljuIZn2wqza2lre8Y53sGzZMkpKSmhsbAw9d8UVV/Czn/2M5cuXs2jRIs4555y0nttgmLIE/UoMDFllygsBqFhBfgYqdH/729+6Pl5UVMSTTz7p+pyOA9TV1bF169bQ45/5zGfSfn0Gw6QiGAQZNK6hCWBqu4astT9T7iGDwZBGdNqoEYKsM6WFQGcLBY0SGAwnPtolZGIEWWdKC4GwuYYMBsMJjrYITEFZ1pnSQpBvXEMGw+QhoF1DxiLINlNaCHSwOGAsAoPhxCfkGjIxgmwztYXAihFIYxIYDCc+Jlg8YUxtIbBcQ4EM6MBY21ADfP/732doaCjNV2QwTHK0AJg6gqwzxYUgc8FiIwQGQ5oJWi3cA6Ng3LlZJWcKytKNvQ31pZdeSkNDAw899BAjIyO8973v5atf/SqDg4PcdNNNtLW1EQgE+PKXv0x7ezuHDx/moosuoq6ujhdeeCHt1zYuNv8BfENwxocn+koMuYbdEgj6Id8zcdeSY0wNIXjy83B0S9TDAsn8kQCFBXmQn6LxM/1UuPKbMZ+2t6F++umnefjhh1m7di1SSq699lpeeuklOjs7aW5u5vHHHwdUD6Kqqiq++93v8sILL1BXV5faNWWDDb8Eb68RAkP2sccGAj4jBFlkSruGBAIhQGbYzHz66ad5+umnWblyJaeffjo7duxg9+7dnHrqqTz77LN87nOf4+WXX6aqKvNdUMeN3wsj7s30DIaMYq8fMCmkWWVqWARxdu4Hj/RRUVzAzJrSjJ1eSskXvvAFPvnJT0Y99+abb/LEE0/whS98gcsuu4yvfOUrGbuOtODzwkh6h/wYDElhFwJTVJZVprRFAFYH0mD6j2tvQ3355Zdz3333MTAwAMChQ4fo6Ojg8OHDlJaW8sEPfpDPfOYzbNiwIeq9JxzGIjBMFMYimDCmhkUQhzyRmWCxvQ31lVdeyQc+8AHOPfdcAMrLy3nggQfYs2
cPn/3sZ8nLy8Pj8fDTn/4UgNtvv50rr7ySpqamEy9Y7PeqP0L/CBQUJX69wZAunDECQ9bIASEQGes15GxDfccdd0TcX7BgAZdffnnU+z796U/z6U9/OiPXNG781iS1kX4jBIbsEjRCMFFMfddQnjC9hlLBp4XAxAkMWUbXEYApKssyU18IhGlDnRJ+a26yiRMYsk2Ea8jECLLJpBaCZNJCM+kaygaZTn2NIOAPB+yMEBiyTUSw2FgE2SRjQiCEuE8I0SGE2Gp77FtCiB1CiM1CiEeFENVjPX5xcTFdXV0JF0rlGpqcQiClpKuri+Li4uycUMcHALzGNWTIMkYIJoxMBovvB34E/Mr22DPAF6SUfiHE/wO+AHxuLAefOXMmbW1tdHZ2xn1d77CPgRE/ordkLKeZcIqLi5k5c2Z2TuYfCf9sLAJDtjHpoxNGxoRASvmSEGKu47GnbXdfB9431uN7PB7mzZuX8HU/eG43331mF3u+fiUFqbaZyDV0fABMsNiQfexWgAkWZ5WJXBk/BjwZ60khxO1CiPVCiPWJdv3xKC3MB2DIF0jwSkMoYwiMRWDIPiZ9dMKYECEQQnwJ8AO/ifUaKeVdUspVUspV9fX1Yz5XaaEyeoZGjBAkxG+EwDCB2NNHjRBklawXlAkhPgxcA1wss5ASU1ZkWQSjpndJQowQGCYSkz46YWRVCIQQV6CCwxdIKbMymSVkEYwaiyAhPhMjMEwgpunchJHJ9NEHgTXAIiFEmxDi46gsogrgGSHERiHEzzJ1fo2OEQyOmF+shJisIcNEEjQWwUSRyayhW1wevjdT54uFCRangM4aKqwwQmDIPgFTRzBRTPl8yrIiEyxOGm0RlNUZ15Ah+5iCsgljygtBicdyDZlgcWJ0jKC8wVgEhuxjXEMTxpQXAm0RDJtgcWJ01lBZvRECQ/YJ+kFYS5IpKMsqU14IQsFiYxEkRguBsQgME0HADx5rpKxxDWWVKS8ERQV55AljESSFriwua1Ci4DfmuSGLBP2QX6isAiMEWWXKC4EQgrLCAgZNsDgxfq/6IyypUfeNVWDIJkEf5HuUGJgYQVaZ8kIAUFKYbyqLk8HvhYISKK5U903mkCGbBPyQV6CEwBSUZZWcEIKyogJTWZwMvmE1p7ioQt03FoEhmwQtIcgrMBZBlskJISgdq0XQug4m6VCbMeEfAU+JEQLDxBD0hS0CEyPIKjkjBCnHCI5uhXsvgZaXM3NRJyL+YSgohiLtGjJCYMgiQb8VI/AYIcgyOSIEBXhHR2E0hT53g9YMhKHjmbmoExGf1yEEJkZgyCIBP+RZQmDqCLJKTghBWVE+Fw/8BX6wMnlXz+igurW3Zp7q+L3gKba5howQGLJI0Ad5+UoMTIwgq+SEEJR4ClgwugsGjoIvSatAC4G9NfNUR2cNmRiBYSIIuYZMjCDb5IQQlBXlMz14RN0ZGUjuTb4ctQgKilTAWOQbITBkl4AOFhcYIcgyOSEEpYUFNMsOdSfZxS1Zi6DtTRjuHvvFnUj4vJYICGUVZEoIvH25lY1lSI5gwJY1ZFxD2SQnhKAi30ejsBbrZP3eycQIggG4/yp44+fju8ATBb9VRwAqYJwJIRg6Dt8+GfY8l/5jGyY3urI4z2MKyrJMTghBY7A9fCdpi8ByIcWzCLy9Sih0htFkxz+iYgSgqou9GQgWD3aq76znQPqPbZjchFxDJlicbbI+vH4iqPMfDd8ZTTJGkIxF4O2xbnvHdmEnGr5hlTUElmsoA0KghTWXgvCG5AgGlDWAMDGCLJMTFsG00cPhO+mMEQxrIZgiaZb+EVVHAJmLEWhhTTZ7y5A76PRRU1CWdXJCCKq84xCCXLEIpAxXFoMRAkP2sVcWm4KyrJITQlA+3MaBYIO6k1aLIMUA9IlMwAcymHkh0DMPjGvI4ETHCExBWdbJCSEoGWxjj5xBUBSk1yIYnkIWgf6cnkxbBDpGYCwCgwMdIzAFZVln6guBlBT1t3JQNuD3lI3BIkjGNTQFLAItBCGLoFIt2un+g9TfZyp9nwy5QdCnislMQVnWmfpCMHScfN8ArbKB0fzy1NNH/Um4hkb71W5mMqNdNXYhgPRbBX7jGjLEIOAzBWUTxNQXgp4WAA7KBkbzy1JPH41nEWjXEEz+OIF/RN16rDqCTDWeM8FiQyy0a8gUlGWdqS8E3S0AtMp6RvJKkl/Y9EIVzyLw2oRgsscJ/E6LIEON50wdgSEWEemjxiLIJjkgBKqC9RANDOclGSMIBsJCkKxFMNnjBNoiyLQQhCyCwfQe1zD5cQ6mMf2oskYOCEELlNUjCssZEiXJLWx2t0XcGEEPFJarnye7RaB36B4TIzBMEAFfOGsIOfnjbpOI3BCC6jmUFuUzSGlybah1fKCwPHHWUPUc9fOkjxE4s4Yy5RoyQmBwIRgAZLjXEJiisiwy9YWg5wDUzKW0sIBBkrQItBCU1iqLIJaJOtwNNZYQTHaLIJYQpPtzmToCgxs6OJxvFZSBiRNkkaktBAE/9LRaQpBPvyxWvulEJqfOLCqrU7fafx5xbJ96nbYIJnuMQO/UPbbuo5A5i8DUERjs6LoBnT4K6u/XkBUyJgRCiPuEEB1CiK22x6YJIZ4RQuy2bmsydX4A+g6BDEDNHMoKC+gNWotcosVNL1KlWghc3Bh6p1w9O/L+ZCVkEVjzCDylIPIyFyMIjBgfsCGMtgjyPMoqAGMRZJFMWgT3A1c4Hvs88JyU8mTgOet+5rBSR6mZS0lhPn1Ba5FLVEugXUPaInCLE+hisrI68JRNoRiBJZaZmlJmb9lh4gQGTUgI7BaBEYJskTEhkFK+BBx3PPwe4JfWz78ErsvU+YHw8JPqOZQV5dMdsPzfCS0CSyhKa9Wtm0WgU0eLq6G4KrKmYDISqiwuCj+WiSll9sXfCIFB4xYjMEVlWSPbMYJGKeURAOu2IaNn625RO4zKGZQWFtDttxa5hEJgCxaDu0WgF/6SmsxN88omzspiyMxwmgiLwNQSGCxCMQJPOGvIWARZ44QNFgshbhdCrBdCrO/sHOMoyOIqmHcB5BdQWphPV0gIEixuTteQq0VguYZKtEUw2WMEw1Z5f374sUy4hnzGNWRwIWgPFmshMOmj2SLbQtAuhGgCsG47Yr1QSnmXlHKVlHJVfX392M72jjvgQ38EoLSwgK5Ry/eYqJYg5BqKFyOwuYaKKid/jMDnDaeOajJlEegiPJNCatDoxIF8jy1GYIQgW2RbCP4MfNj6+cPAY9k6cX1FEd2BJF1DviGVMVNiJTW5Zg1p19BUsQi84apiTaaCxSXT1M/GIjBoQq6hfFNQNgFkMn30QWANsEgI0SaE+DjwTeBSIcRu4FLrflaYW1tKP8mmjw6qXateGGNZBIXl6pd2SsQIvOGMIU1hWdhNli58w1BqCaypJTBo7OmjpqAs6xRk6sBSyltiPHVxps4Zj7l1ZaqyGJLLGvKUhhdGtyllw93KLQRhi0BKlXY5GfF7IzOGQAlduhdrvzdsaRnXkEGjd//GNTQhnLDB4nQzs6YEKfLx5RWpQTLxG
B1Uu+GQRRDDNVRiCUFRpfpFjjfW8kTH5+IaKrTmN6SzC6Tfa8vGMq4hg4WuIs7LtxWUGSHIFjkjBEUF+TRXlzAsSpN0DZUlsAh6wjvb4ip1O5njBP7h6GCxp1RVZru12BgLAb9yAYRiBMYiMFhEVBZbFoGJEWSNnBECgLm1ZSpOkHKMIEb6qBaAkBBM4jiBfyRaCNKd3aOD7qUmWGxwYE8fNTGCrJNbQlBXSk+gOD0Wgd01NBUsAt9wZDEZqO8Akh/vmfAc1vdoYgQGJ6HKYo+pI5gAcksIasvoCxTjH06ioKywzCp3L4hhEdhcQ6EhLpNYCPwjLsHiUnWbrswhLaiF5WrXZ4TAoAnYew0ZIcg2OSUEc2rLGKAE31CSQgDKKnBaBD6vcnMUTyGLwD/skj5quYbSlTnkt7W6Liw1riFDGNN0bkLJKSHQtQTBRL780YGwEHiKoxcsezEZhHv3T+YYgc8tfTTdriHd2K5YBaKNRWDQ2NNHTdO5rJNTQjBrWimDlJCXbPoouFsEw7aGczBFLAJvdIzAY7mG0hYstk1B85TEtzR2PwvH9qTnvIYTH1fXkLEIskVOCUGxJx8Ky/EE4ixA/lG1O4lnEeiGc9o15CkFkZ+ZfkNHt8Bvboo/Ozkd+F16DYVcQ2mOEXi0RRDHNfTo7bDmR+k5r+HEx9U1ZGIE2SKnhACgsKyKAumLnRvvsw2uB7U4Oi0Cp2tIiMz1GzrwGux+Crr3p//YGiljCEGGsoYKSuK7hoJBGDqe/vYWhhMX0310Qsk5ISgpt9w5sVJI9eITsghKXCwCh2sIMtdvSFsZA+3pP7YmNIvAKQQ6ayjNdQSeYvfvVTPSB0j3Zn+GqYk9fTQvXzV9NAVlWSPnhKC8Si3e/b3O4WkWWgi0f9zNInC6hiBzFoEWrP5MCoEO4jpjBNoiSNPO3GePEZTGHkyjLa5cyCo6vh/evH+ir2LiCdgqi/WtiRFkjZwTgupqVdV6tPOY+wu0G0S7htx2rnqh0kFiyNxMAj07IRsWgTNrqKBQ+WvT5RpyBotjLfTa4sp0XOREYONv4S93mE6sQVsbalC/dwGTNZQtck4I6mvVsJn2YzGmnjldQ64WQY8SAfs0r0xbBJkUAr0gO7OGIL1pnsnWEYQsghxYHLXIDsewUHMFu2sIVDGnsQiyRu4JQZ0Sgu6uLvcXuMYIXFxDdrcQWEKQAYtALxQDMYe5jZ/QTr0o+rnC8mjX0Nt/gnsuTb0rabJ1BNoimMzdXJNFC/1QjgtBwBYsBssiMEKQLXJOCIrLlDunV8cIju+HP/1jeLEfdcsacnENlbgJQSYtgqPpP7bGb8vmcVJYGu0aalsHbWtT/0NNto5Ax2BywiKwft+GYmxMcgU9qtIeIzAFZVkj54SAogoABvusXeemB2HjA3Bkk7qflEXQE5kxBCpGMNof/oVOF6GsoQxaBD5bfr+TwrLoBVsLXqoLtW9Y7fTy8pRFEBhx/768ORQjMK4hRdCnMoXyrCUp3wSLs0lSQiCEuEMIUSkU9wohNgghLsv0xWUESwi8g9Zic+A1ddu1W926xghcLAI31xCkP2CclWCxbafuxM01pD9jqlk9/pGw1RGqWo7R0A9yI300ZBHkuBAEfGG3EFiuIZM+mi2StQg+JqXsAy4D6oGPksV5w2mlsAyJQIz2MzA0BG3r1ePHdqlbN4sg6I/MYBjudnENZajfkHYNDXenb0CMk3hC4HFxDYUsglSFYDhsdejAdKzpb2M5/mTExAgUQX/YLQSWRWCEIFskKwR6EO9VwC+klJtsj00uhMBfUEYFwxzfvTa86zymLYIBtRvR2Qt6cdSvk9LKGophEaQ7TjA6AEXWsTPlHrIHcZ0UlkW7gLxjtAjsje1CFoFLLYG2CAKj6Xe1nWiYGIEi6HdYBB5TUJZFkhWCN4UQT6OE4CkhRAUQzNxlZRZZWEEZXnz7XlEPzD7XJgS2hnNg27lau2bfkPoFdVoEoZkEabQIAn51vtoF6n6mhCBWZTFYMQLHYj0eiyDkGkrCIhjLOSYbJkagCPrDs4rBFJRlmWSF4OPA54EzpZRDgAflHpqUiOIKysUwRYffgNqTYM55qpdPwKcWXp0xBNEWwaBViFZaF3nQTFgEepGoPUndZipOEKuyGNIbLPaPhMVGi63bMYZtQjDVU0hNjEAR8DlcQ6agLJskKwTnAjullD1CiA8C/w5M2p7LBSVVVIkh6o6/payBuoVqR3J8f+QsAoi2CLQQlNVHHjQTMQLtP860EPji1RGUqe9E1wxIOfZgsS9Ji0Cnj47lHCcqq78Jv7kx8rFgMCz2Oe8aCjhcQ6agLJskKwQ/BYaEEKcBdwIHgF9l7KoyjCgq57S8fRQH+mHOO6D2ZPVE1+5o11CURWBVJEcJgeUqSqdFoIVg2nx1mzGLwFbx66SwDGQg7D7ye8N/oKlm9fhdYgRutQTeHiitVT9PFSE4shkOvxX5mN0aynnXkC/SNWQKyrJKskLgl1JK4D3A/0op/xeoyNxlZZiiCsqx/gjnnAt11o772C4lBHqRgrArQ++ah7RF4HANZSJGoHeLJTVqYcy0ELhmDTlcOHahG4tF4HFaBA4hCAaVVVXRbF3bFBGC0f5oa1H//xYUG9eQM33UFJRllWSFoF8I8QXgQ8DjQoh8VJxgcmIt2u2iFqrnKP9+eaOaiDU64IgRWAtWlEXgEIL8ArVoptUisBaOogoon57BYLEX8ovUXAUnzpkE9sVsLDECLTax6ghGegEJFdPdn5+sjAyoAjp7CrCOD1TPVt+vP4d3wK7pozn8fWSZZIXgZmAEVU9wFJgBfCtjV5VprKKyN/yLCOh2OXULwxZBRIzAYREMHlOLmP01muKqyIyX8aKLyYrKobwhszECt4whsAmBtWiNxyLw2y2CGGMwdaC4smls5zhRcRNS7fqrnq1uc9k95JY+auoIskZSQmAt/r8BqoQQ1wBeKeWkjRFQpHb8a4OL6Oy3dmh1J7sLgZtF4LQGNOluPKcXiqIKZbFkaiaBf9jdLQQ2IbAW7BG7EKSY0RNRRxAjWKyFtGKKCYEWdbvr0G4RQG4HjJ3po6ayOKsk22LiJmAtcCNwE/CGEOJ9mbywjGK5ht4ILuFwr7XQ1J6sFqHBzkjXUJRF0BkdKNaku/Gc3kUWVYQtglQ7fiaD3WXjJMo1ZBeCVF1DXpcWEzEsAi0EUyZG4PL96cdCQpDDFkFU+qgpKMsmBYlfAsCXUDUEHQBCiHrgWeDhTF1YRll2A+1Dkt3PzeBwzzCnz65RriEAGUxsEVTOcD9ucVV63TfaIiisUD7zwIhaSJzFbOPFHsR1EsruSYdryOaCyreGlEcJgZU6WmkFi6dC4zkpw4t+hEXgFIIctwicwWITI8gaycYI8rQIWHSl8N4Tj6oZlJ7/KUBwuMdazHTmEIRn9YJ7jCCuayjNweKCErVoljeqxzIRMPaPuNcQQNg6CmUN6QB2ZWoWQTCg/rDtRWvxpr+FgsVToBW1b0htMMARI9BCMEfd5nqMIMo1ZLKGskWyFsHfhBBPAQ9a
928GnsjMJWWHimIPFUUFHO6xFviqWeFpZLGyhqS0hCCWa6gyzUIwEIpnUN6gbgeOQv3C9J0DIls/OHFzDeUVqJTWVCwCt+E3bsNpQq6h5sj3TWZGbE373GIEVbPUba67huxWqSkoyyrJBos/C9wFLAdOA+6SUn5urCcVQvyLEOJtIcRWIcSDQogYDurM0lxdwiFtEeTlwzSrp0+Ea6gIEMoi8PYqv2WiGEG6/Pgj/aEMJ8qtHXImLAJ7ENdJoYtrqLhKfUep+O99LkVrbsNpvD1qN6jnPUyFYLG9e6vXxTVUWqtSj3NZCKLSRwtNjCCLJO3ekVI+IqX8Vynlv0gpHx3rCYUQM4B/BlZJKZcB+cD7x3q88dBcXRx2DYHKHIJIIRAiPJMgVp8hTXGVqsJ1NmkbK6MDNiHQFkEGMod8w5FFdHY8zqyhPuUWijd83g23ojVPWfTA4x6QAAAgAElEQVQxdGfX/AK1MEwFIdCxHoiOERQUq89aWmtcQ/k2IcjzKHea7j67/j545XsTc205QFwhEEL0CyH6XP71CyHGkydZAJQIIQqAUuDwOI41ZpqrSxxCYLlcnDUCnmK1o41VTKZJd+O5kX4VKNbHzi/KjBCM9IUFx0lBodqd2V1DxVXKlTQWIXBaBFEtrm3T3zwlU8M1FMsiGLH1tSqtMcHivPzwfS0K2j206few8cHo9xnSQlwhkFJWSCkrXf5VSCkrx3JCKeUh4NvAQeAI0CulfNr5OiHE7UKI9UKI9Z2dnWM5VUKaq0voHvIxNGoFpUIWQXnkCwtKLIsgRp8hTdqFwLZAC5G5WoKR/nDTPDfsvnxvn/qcbot4PEIzD+wxAhcxGbbNg071HCcqETECe/roYPh3rWRabruG3NJH9eOgYmPpLNY0RJD1zB8hRA2qZ9E8oBkoszqaRiClvEtKuUpKuaq+PsbCO05mVKvdaShgvOgquODz0Hx65AujLII0CUHLq7D+F7GftweLASoa028R6G6isSwCiBxX6e1VouE2yzkeIdeQ3SIojR5MM9wdbuBXUDw10ke1RSDyo2MEWghKa3PcInAZVQlKCKRUsbHhnszU0RgmJAX0EmC/lLJTSukD/gicNwHXQXNICKxdaVE5XPQF5Q6xU2C5KEIxglr3A6YqBK//BJ77z9jP24PFoCyCdAeL/V5llhfFsQh0K2oIu4bcMn4SnQciW1nESh+dchaBFSOoaIqOEWihL52W4zGCgGMwjfVz0Ke+P9+QqqOZCjGjE5CJEIKDwDlCiFIhhAAuBrZPwHXQXK0WpYg4gRueYvULOHRM7VadQqHRO9lkO5Ae3692wLHGMdqDxWBVFx9N7tjJ4rU1totFYakjWFwV/k6SxediERSWuriGesPf41SLEVQ2x4kR1CqRzdXcebfh9aBiBPbNj3EPZYSsC4GU8g1URfIGYIt1DXdl+zoAGiuLyRNJCEHIIojTZwhSswikhO4WQLr7hgM+q6bBLgTTlfsgnT1YtGjpa3dDu4YCfrWohSyCVILFbjECh1URDCgfurYIUg1In6joGEGl0yJwxAggcihPLuGWPgrqd92++Rk2QpAJJqQ6WEr5H1LKxVLKZVLKD0kpRxK/K/148vNorCzmcG+CXafe/cYrJoOweyWZXctAR9g/rmcc2LE3nNPoFNLBNAbPR2yVwrHQriG7aHhK0l9HoAXUnjU0FYRgdECJXnF1nBiBFoIcdQ8500e1myjgi4yL5apQZpjJ2yYiTUSlkLqhK44TWQQFheoPPhmLoHt/+Ge3hT0kBLZgsa5APbY78fGTJRnXkN6568+lg8VBf/LWScgisMcISpXfV7vGtIBONdfQSL9a8IurIi2Ckf7IGAHkbsA44HOkj1oWQdAXmSlnXEMZIeeFoKmqOIkYgbUzjdd5VJNsv6HjdiFI0iKYfbbKPNm3OvHxk0WfJ176aGGZcmOEhKAq7OtPNpirB7J4HFlDEN71a7O/xJ41NAWCxTooXFyphE0PoLG3PNeuoVxNIXW6hvJsdQQRFoERgkyQ80Iwo7qEw71egsE4aWkFxeqPduh4+oSguyX8s9su0N6CWlNUATPPTLMQJBMstmIEdjdSvOHzbsSqI7A/p81+u0UwFdJHRywXUJEVhxnpU8IY9EWmj0JuuoakdEkf1ULgV0KgY2XGNZQRcl4ImqtLGPUH+czDm7j3lf2sazmOdOYqe0pgsAOQiYWgKMnGc937wz3341kEhY4FesFFagh6unaOIcsjnkVQqoRA78Z0sBhSsAhi1BFAOFbidVgEUyV9VGd/aavL2xuuy3DGCHLRNaQ7s+a7FZRZFkHdSSDyjGsoQ+S8EFy0qIHzT67jpV2dfO2v27jxZ2vYcNDxy2b3a8eqIdCk4hqqPUkFRpMNFgPMvxCQ0PJy4nMkQ1Lpo2Wqh9Kglcang8WQ/I7dN6x2fPZc8SiLYIrHCIpsQuCMAXlKVQuRXHQN6ThTrPTR/na1aSquMhZBhsh5IZhdW8qvP3426//9Ul6+8yLyBLy82xG8tfu10+Ya2g81c1UDu7gxAke7ixlnKCth7wuJz5EMI33WIuSJ/RrdeK7viLotrow9fD4W/pHoVtfaPx5qX+GMEVhCEAwmd44TFXuMANR3HrIIrO9ACKu6OAeFIOgiBDpGELRcQ+UNaoNgYgQZIeeFwM6saaUsm1HFa3sd5rndIkiHEIz0q8DztHkqCynZGAGoBXvuO9MXJ0jUXgLCi1W/JQRFlbaBPcm6hoajW127WQQFxbYB93oWxCS3CkIxAm0R9IX/f+2uv1ytLg5aRXRurqHRQWUxl09X1rNxDWUEIwQOzl1Qy1sHu8ON6GBsFkG8nig6UFwzT+0C48YIyqOfW3CRsijsAeexMtIfPz4AYSHoO6wWrrz81C0Cnzd6HGZoDKbNItBuIZg6QuCMEYzYhcDW6bZ0Wm7GCHQ1tZtrSG8+yhuUpWhcQxnBCIGD8xbU4QtI1rfYfuG0RSDywsVOsSiuUjuceDtlnToasghiCIGnLDK3WjP/QnWbDqvAm6JFoCuQQ4t0sq6h4UjLyn4M/V3ZO49C+PWTOWAcDKjrd1oEutrY7vrL1Q6kQTchsCyC3jZ1WzHduIYyiBECB2fOraEgT0S6h/SCVVoHeQm+smTaTOhispp5ysIY6or2gzsbztmpW6hGOaYjTjDSF7+GACItAqcQpBIj8DiFwFlH0O2wCPTzk9giGLUt+EVxYgSQw64hK0bg5hrqbVW35Y3GNZRBjBA4KC0sYOXsatbste3S9c40kVsIbEIQp/Hc8f3ql7qkWomLDEabvPaqUydCKPfQ/hfHH0hNxTVkF41UC8p8bhaBIwXV67AItHCk0sriREPv/AvLVcaUpyxOjKDWakI4yYPjqeJmEeQ5LILyRss1ZFpRZwIjBC6cu6COLYd66R22dip691uWIHUUkrQIWpQ1AOGWFU73kLPzqJP5F6pFo31L4muKh7cvsRB4bLvWMVsE3mghKK5UC+SL/w/W3aPcInbXW6rnOBFxBv2LK1VjPbcYQck0tSnItV1vvBhBSAisrCE
ZiBz9aUgLRghcOG9BLUEJb+yz3EMpWQTWjjaRa2iaJQS6LsEZMNa557FoWGIdqyXxNcUj0XQyiFystGikWlDmG3YJFpfAh/4E0+bD4/8GfYciXUMFU0AI7BYBWAWHVoxA5LknIqSzqeBkwC19VNebDHaqzUFBUXiTkGtCmQWMELiwcnY1RQV54ThByCJQf6itx4fwBWKY74ksgoAPeloTWwQjA/F36uWN6nY8g2qCARiNE4vQFLpYBPke1fcoWf+9fyTaIgCYdSZ89En4wB+UlXPSxeHnQimqk1gIRh31IMWV4RhBYYVy82l0d9l0Dx860XFNH7XN/NC/69ptaDKH0o4RAheKCvI5c+401ux1WgR1tPd5ufg7L/Lg2oPubw4JQYxdS2+rMm9DFoElBFEWQYJsntJataMcz+jKkNsiBYtAfz4hUmsT7ZY1pBECFl4Gtz0GJ18aflxbHVMlRgBhi2C0P/J7BZsQZGAu9YmMm2vI3oBOC4G2Fk3mUNoxQhCDcxfUsrO9n87+kfCCVFbPM9vaGQ0EecvZhkJj7yfjxnFbxhCEXUPO/PF4wWJQaaVlDdA/jollybSXALVT0zs0uxsplV5APm901lAiCqaCReAWI7AsAuf/b1kG5k1MBlyDxfmAZS2FLALjGsoURghicP7Jaqf+/I52qF0Al30dlr6HZ7er3dq2wzGyggqK1AIWSwi6bTUEoGYYFFVFWwSJgsVgja4chxshmRbUGr17tU8yS9kiKEn8OjupFq2diDgLA+0xAqdFUFKjFsOccw25pI8KEb5fYVxDmcYIQQxOnVHF/PoyHnnzkPqlPO+fGMir4LU9XRQW5LG3cwCvL8as4XhtJo7vV0JRPj38WFltZIzAP6KabcULFoM1zH4cboRkWlBrdOaQ3Y3kKU2hoMyljiDhOXX66BSpIwBHjMDx/5uXp+JQgxMgBM/8Bzx0W/bPC+5N5yBshRrXUMYxQhADIQTvO2Mma1uO03JMFf+8vKuT0UCQ9585C39QsqdjwP3NiYSgZm5kYVppXaQ7YCRJ3315Y3osgqKq+K+D8VkEUrqnjyYi1VoFN9rehD99auJy80PZQZZ1U1Slvovh4+5CX1Y/MRZBy8vQtj7754XwhLo8R+NDbRHoTVNhmXqNcQ2lHSMEcbjh9JnkCXj4TZXL/My2dqpLPdx27hwAth2J4R6KJwQdb0P94sjHyuph0BYjSHanXt6gdo9jXeT0NSZjEYSEwJHemYwQhGYRpCgEqWYmubHzcdj4m4kLwOq5xDo7SLvh+o64x4DG6+4bK90tKk41EcVaofRRRzsVLQw6iC6E6TeUIYwQxKGxsph3LaznkQ1tjPqDPL+zg3cvamB+XTmlhfmx4wSxhGCkX/3BTV8W+bjTNRSrBbWTiukq0DbWP4zQMPpUYgRjCBZrsXDWESQi1cwkN/S8W928LNvozqMabVGN9EbHCEBZedkOFo8MKBHweyemr5Nb+iiEXUMVNjeq6TeUEYwQJOB9Z8zkSK+XHz6/m54hH5csbSQvT7B4ekXqFkH7NnXb6BCC0rrI3VisFtROQumGtsyhgB8e+yfo3Bn/vZDcdDLNeFxDel5xqhaBPsd40kf1dzNRQjDqyP6yf9fxXEPZ3Jn3HAj/PBHdT0MxAqcQWDED/XsOKqBuLIK0Y4QgAZcsaaSqxMOPX9hDYX4e71qoisqWNley/Uhf9FhLiCMEW9WtUwjK6tSuSPs+Y42pdBIqKrO5PY7tgrd+DW8/muCTobJXRJ77ztRJyMftCBYnJQR6XvEYhKBgnHOLTziLIIEQlDcoV0k2F7tE87OTIRgjcSKV97oFi/OLIt2RJdUmRpABjBAkoNiTz3tWNBOUqragvEj9si5pqqTf66et22UhLK5SbhenSLRvVc9VzYx8PFRUZv0RxhpT6cStuvj4PnWbrEVQ5KhujUVhmfqjtGf+eIqTEwK9kKeaNQTjn1scsggc9RbHdsNdF4WnrmUKPZ1MYxdS1xiB9X+aTfdQ9zgtgrY34etNkcdJhVD6qEMI8jzq+7D/fhrXUEYwQpAEN62aBcCVy8K+yqVN6g96u5t7qLhKpX860x7b31bWgHPh1c3sdJwgaSFwqUTVdQrHdsV/L1jVy0m4hQDmng9Lrol8LFmLQLu6PElYHk48xWNPHw34w/UZzgW/5RU4vAG2PDS2YyfLyECkZRdhEbh8H7rfUDYDxhGuoTG0wT7wKgRG1O/3WIiZPuoJ1xBoSmqMEGQAIwRJsGxGFc/8y7tCggCwaHoFQsTIHHLrNxQMWkJwSvTrnW0mtFsgUbC4qEItrhEWgRaC3YnN9WRaUGtOuxned1/kY8nu1nUHycrm5M5lp2AcFsFgB2BZZU7XUI/VImTrI2M7drLEjRG4CP1EtJnoblHzLWBsFoG2PvXsgFQJVRY7YgRLr4Vl74t8rKRaBdrH44oyRGGEIElObqwgLy+8ky8tLGBeXZl75pCbEPQcUDtjZ3wAIhvPSQnb/gQNpySX1lne4G4RBEYSdyb19iZ3jlh4SpVZH/DHf50WgupZ8V/neo5xxAi0Oyi/MNo1pBetI5vg2J6xHT8ZnDGCokQWwQS0meg+AE2nqXjRmIRgu7odtxA4LILz/w3O+fvIx5Lp7mtIGSME42BpUyXbjyYpBNpsdhOCkEXQCYc2qMXpzI8ldxHljZGL3PF9UD1b/ZzIPZRMC+p4JDuusrdVFVIVJ1G45naOsaaPaoFsPAX6D0c+13MQak8CBLz9x7EdPxmcMQI9nAbcLb5st5mQUm1Sps1X507VNSRl2CLoGacQOGMEbuh+QyZzKK0YIRgHS5oqaT0+HB5go3HbtbRvBUR4joAdT7HaNQ52wfp71c/Lb07uIuwFSLrF9cmXq/uJAsaJOpwmItmmcD2tY7MGYHzpo1oImlaohcNuWfS0wsyzYM55sOXhzKRr+ketViGO71iLr5tFkO02E4OdyvVWM0c1QEzVIuhtC8eAxmoRxEofdUP3GzKZQ2nFCME4WNqs/qB3OOMERS4dSNu3quZ1haXuByurg649ymd96o3JL9D2fkO6xXXzClWWn0gIkplOFo9kh9P0tkZnSiXLeNJHdepo03J1qzOI/KMqZlA9G5ZdD8d2jj3QGQ9nnyGN/s5jpQdns82EzvSpmTs2IdC/Y9MWhF2AqeI2mCYWpt9QRjBCMA505tCWQw5/pdtMgqNb3QPFmtI62POMypA58+PJX0RFozqPfySyxXX9QrXAxWMkiaE08Uh2lGRvK1SN1SIoHnuweOCoWty0q0xnDvW1AVJZKUveo9pYZCJo7Ow8qolnEcD4e0ilgo4jVWuLIEXXkI4PnHyp2pCMRbRj1RG4YVxDGWFChEAIUS2EeFgIsUMIsV0Ice5EXMd4aago4tQZVfzsxb10DYyEn3DGCEYGVBC38dTYB9MB45lnwfQ4r3NiryWwt7iuXwydu2K7PPwjKqDsiBFsbuthw8Ek/8iSaRPt7VPfw7hcQ+OwCMqnhzNidOaQ9mVXz4byeph/gRKCdLuHElkEsbLCyhuyFyzuaVG31b
OhdNoYLIIdKsDddJq633co9WsI+FSgOi+J5ci4hjLCRFkE/wv8TUq5GDgN2D5B1zEuhBB868bl9A37+dKjW8NVxp5iVXylhaBDt5ZIYBFAatYARFYX21tc1y1UqYt9h93f59J5NBCU/MMDG/j8I5uTO3cyoyS133isFoFOHx3LIj1wVFlMuleNDqrr1FF9TctuUAHT/S+O7Rr7j7oPVA9NJ4sRI4hVV5HNNhPdB9TvUGEplExLvfFcxw6oXxR2/Y0lThD0JxcfAOMayhBZFwIhRCXwLuBeACnlqJRy0v6vLp5eyb9cupC/vX2UP2207YaKq8K/rLq1hLPZnJ3GpWphWvqeiIe3H+njHx54M/bsA3veub3Fdf0i9Xgs95BL59EXd3VwqGeYvZ2DjPiTyNNOxiLoGacQhDKTRuK/zg1tEZTUKGHWmUO9rWoHWjlD3T/lvco18td/hdEU3VBSwr2XwR8+Ev2cc16xpqhSCVysLJlstpnoblGfHZRrKOhzFzU3dMZQw5Lw/+9Y4gRBf3TDuVh4itV3Z1xDaWUiLIL5QCfwCyHEW0KIe4QQUVsjIcTtQoj1Qoj1nZ0n9ui+2981nzPm1PCVx97mSK+1KJY3qPbHD9wAmx9SO+94i+G5/wj//FZUh84H1x7kya1HeTtWp1O7RdC9X6UBAtRZQtAZI4XUZTrZb15XO+VAULK3YzD2tWpCMYI4i6feIY7HNQSpZw5Jqb6TCqtFQWVTpEVQ0aSmw4Hy1V/7Qzi+F1b/d+zjPfxx2Lc68vGuvcqa2PNsdD9/57xizZJr4axPxL72dLeZ2PuCuk43eg6ozQPEHpsai75DSuzqF1miKsaWQhrwRbegjofpN5R2JkIICoDTgZ9KKVcCg8DnnS+SUt4lpVwlpVxVX1+f7WtMifw8wXduPA1fIMgPnrOKk278JZz7T6rC9+AaaD4tcU8fl13Ri7vUYuDaygKslgRC7X6P7w/PQi5vUFaJ3SLwDYdnFzhmHrR1D/H8zg4uP0UtQjvbY5zPTmi4fBwffm+rKugqa4j9mniMdW7x0HG1u9WLaoVdCFrDAWTN/AvgjI/Cmh+7D2jp3g9bH4a1d0c+3vKSuvWUwov/E/lcrBjByZfAZV+Lfe3pbjPx8Mdg9TejHw/4oPeQSh0FmxAkGTDu3KFu65coUa2YnnnXEJh+QxlgIoSgDWiTUr5h3X8YJQyTmrl1ZVy0qIEXdnSoWEHdSXDpV+GOTfDJl+C9P0/5mC3HBjnQpXbbMYUg36P+gI9uVrtmPQtZCCtgbAlB/1H4/nJY/Q1139GC+ndrWxHAF69agidfsONoEu6B0CIdxyLoaVW7xWQCgW6MdW6xThUNCcH0cLyk52C0EABc+p8qsPynT0W7oo5sUrd7X4h8bv9L6j3n/yvsfgoOvxV+LlaMIBHpbDMxOqimoelmhHZ621S6cbVTCJK0CPTvlh60VDVrjELgSy5jSGP6DaWdrAuBlPIo0CqEsHwXXAxsy/Z1ZIILF9VztM/LrnbbCEshVEaF1Wdn/7FB3j6cXHm8tgZmVJfEX5jLG+Hg6+pnbRGAChh37lRujcf+SRUpbfmDuu8NWwSj/iC/W9fKRYsamFNbxoL6cnYmIwTJpI/2jqOYDJILSLuhd/86UFzRrB4L+JVLw81NV1wJ13xPWVHONt5aCHyDcOA19bOUsP9lmHc+nHW7ssBe+nb4PbFiBIlIp2tIp8y6CUGPrYYAVNYQJC8EHduV9aKbJlbNHJtrKBhIPkYAxjWUASYqa+jTwG+EEJuBFUAMx+zkQs8qeHFXbJP+S49u4WP3ryMYTJyZ8eKuTubWlnLJkgZ2HOmL/Z7yBrXrg7BFAMp3O3QMXv6OqlGYeZZycXRst8UIqnhmWzvHBkb44DlqZ7h4ekWSQpBEQVlvG1S57L6TJRn3kxt6N223CHyDapGXAXeLAOCkS9SCrhd7zZFNUHuyCjrvfkY91rFdfb/z3qXec86nYMdfVc0IKIsgzwMFRalde3F1+tpM6HTO4ePR/Xl0DYHTNTScrGtoZ+TY1epZ6nypjk4NpGgRlE6bmHGeU5gJEQIp5UbL/79cSnmdlHJKpAA0VZWwqLEitJN34g8E2djaQ3vfCJudRWgOvL4Aa/Z2ccHCepY0VTI4GqC1O8aCqxc7kR+5wOmA8fNfg/kXwk2/AgTseFx1cAQoquCB1w8ws6YkJGSLpldypNdL75CjdYaTgiJ1vFhFRP5RtQsfj0UQz/207l7lxvG6uM2iLIImddtqeSRjXVNeHsw6O2xhgdr5H9kEs89Ru//dT6nH91vxgXnvUrdnf1K5gf76f1QnWWefoWSJ12bC2wcPfTicApsIe/qwLjjUdB9QC7DOniquUr9DyVgEUqoYgc5OA2VlBUZTb48R9KcmBNVz1DnGM8LUEIGpLE4zFy6qZ93+bgZHojty7mofYGhUpWU+/fbRqOftrG/pZtgX4IJFSgggTpxA+5SrZkaa2PUL1W1xFbznJypzZuaZatfq7YP8IvZ1+1izr4tbzppNvtVddfF05dPe2Z7AKhDCmklgLdLHdsPvbg0vzLqCd6ztJcBmdbiIzbp7VGbWfZdHL4wD7WpR1tW7lVoI1qpb7Rd3Y/Y5ynLQQdO+Q2pxbDpN9XHq2qOycFpeVsfR4ltSA9f+AI5shrsuVOKRanxAE6vNxIHXVHfat36T3HHszfa6HULQc0At3jpjR4jk20z0H1EJB3aLYKwppEFfaq4h/X+XrBgaEmKEIM1csLCe0UCQNXuj/5g2tiq/5pzaUp7eFj8Q+OKuDgrz8zhnfi0LGyvIE7DtSIyFWe967W4hUC6Zpe+B634GVdaub/HVcGSj2s0VVfC7da0U5AluXBVerBdpIXDrrOrEPqVsy8NKZLb/Rd0fbw2BPj5Ep4+ODKjPcNKlKvPl7ndD67rw8/1HI4eaVDiEQO+C3Zh1jvVay3rQ8YGmFaqVAsCup5QQaGtAs+x6+PhTasfctm5sFgHEbjOhM3V2PpHccfoOh8XUaREc2x39O5OsEHRYNaARQmD9DqW6QAf8qaWP6pjGWCeiGaIwQpBmzphbQ2lhvqt76K2D3UwrK+Qj581lT8cA+zoHXI6geHFXJ2fNm0ZpYQElhfnMrSuLbm6n0a6hGscfdV6ecgctvir82GJrytie5wgWVfLwm21csqSRhorwGMmmqmIqiguSyxyyTyk7uEbd6kDreOYQaGKljx7ZBDKo8vH/v2fVzv/Bm8MZPQNWMZlGi+XxverxeGMzZ5yufPvaPXRkkypAazxFLZy1J6s0U29vtBAANK+E21fDgndD8xgT4mK1mdBCcHRzcjvvvsOqtqSsPtIiCPjUsZztTJLtN7TtTypeYi+SrB6rRZBi+qiOafQYIUgXRgjSTFFBPuctqGP1ro6owfZvtfawYlY1ly5VC/czMayCwz3D7Gof4IKF4fqJJbFmH0DYNaSLyWJwfHAUX818FTuQAfpkMccHR7nl7MjAqRAihYCx1QIi4Ff593ke2PeCWkx6WwERf/ed8Pgx0kcPvalum09XLrCrv
6N2sjseV487LYLCsnA7jUTC5ClRHVztQlC3KNw5duHlltsLNcLTjfJ6+NCjcN2PE39GN2K1mejYHnZF7Xwy8XH6DqmMtZp5kRZB507lz5++PPL1pTWJLYLeNtj4IJx+W7gJHCgXZFFV6imkqaaPljeqDUKiwUuGpDFCkAEuWFRP6/Fh9h8LV+f2DvvY0zHAylnVzKwp5ZTmypjuodU7O0PH0Sy1Zh/0e6MDuG8O1uGVHrbkLYx5TX1eH+/+zmre97M1eE+6AoBDQx5m1pRw/kl1Ua9fNL2Cne39ITHrGhjhofWt/PaNg/x6TQsv7LDcFropXPtWlZVz9ifVDm/HX5VrqLwx9awZO7HSRw9vCDeNA5h/EVTOhLceCFcV2y0CCFsFsTKG7Mw+R53D51VCoJuqQdg9VLcwHHtIN25tJoJBNWxo0dWq7XNSQnBYCcG0+ZFCcHSLunW1CBIIwas/ACS8447o58aSQppq+qgQ6v/QCEHaMEKQAS4MpZGGTfvNbSo+sGK2app12dLpbDjYTUd/dBD0mW1HmVlTwskNYf/ykiblt3e6a7oGRvjUnw+zdOQXPDsQ2yL43dqD9Az52Ha4l8+9rRbCtmEPt5w1O2IEp2bR9Er6vX4O93oZ8Qf40L1rufPhzXzx0S18+bG3+dgv13G01xtuCqf96Wd/Uu0+3350/DUEYEsfdbEI7G6XvHxYeSvsfV65PHxD0YPPtRAkE/+PUYMAACAASURBVLOYdY7aMe9+SgVG7UIw+zzVoO2kS1L/PMmiYxr23XXvQfW5GhbDoitV1pJbxpTGP6LcS5UzlEur71DYdXZ0i/q/qz0p8j3aNaRTQI9uUVXJugfTQAds+CWc9n73/9vqWWHXUMd2+Nn5cHhj/M+aavooqICxcQ2lDSMEGWDWtFJOaijnjxsOhXbUbx3sQQg4bZYSgkuXNiIlPLc9MiDY7/Xx6p4urjhlOsLWkmLx9OghOMGg5N/+sInuIR/VZcUxs4p8gSC/eLWFc+ZP476PnMnTPU0clA0coY4bz3DP6FlsCxh/95ldbDvSx/++fwVvfPFi/vip85AS/rb1SHiU5MHX1YJTPVs1cdv3ouq6Op5AMaj2FM4U1cFjKiA544zI1674ACDh5e+q+06LwCrqS9oiAHjjLnVrF4KCQviHV+HdX072U6SO/mw6uA2q0yeolg6LrlIWw97nYx9Dt93WriFkOMB6dLOKeTiDtKW1qs5Cpxe/9G1Vja4zs9b8SAnkO//V/ZxVM5Vg9bfDb25U59FptrFI1TUEKk7QbbKG0oURggxx+/nz2XKoN7TQb2zt4aT6ciqLlQm8pKmCmTUlUWmkL+zsZDQQ5PJlkYtYU1UxVSWeiMyhu17ex+qdnXz5mqWcu6CWbTGE4PHNRzjS6+UT58/n/JPruf+j5/B++Q02Lfo/NFS6B00XNioheOD1g9z10j5uOWs271kxg8bKYk6fXcPCxnKe2Ho0HCxufUPl34MSAhlQu9HxWgRChOMQmkMb1K1TCGrmwrwLVE8giG0RJCMEZXVqt3zgFXXf6UKpbI49bS4dVM9WwmovbAv19lmkvuuSmvjuIV1DUNkczg7q3q9cZ0c3u8+9sPcbCgZUk73m05Ub5q4LYe09cMr1atqeG1WzVBD9geuVi8lTpgL08Uil+6imeo4SK9OFNC0YIcgQ7z19BnNqS/nuM7sIBiVvHexmpeUWAhWQvXp5Ey/tPsaBrnAs4amtR6krL+L02TURxxNCsKSpgu1H+vAHgtzz8j6+9dROrjp1Oh88ezZLmypp6x6mzxFDkFJy98v7mF+veiEBnD2/lsc/dy3fuPmsmNdfVeKhuaqY53d0MLe2jC9fEzlr+cplTaxrOY5XFKlFou9QeBc9/dSwy2G8FgFED6c59KbK4rHv0jWn36ayicAlRtCc2jXpzzNtQdQAn4wjBMw+V2Vi6YBx5w7lMiqpVi2sT75cua4C0TUrgE0IZoQzyo7vV+4mb29iITj8lmrlcN4/wSeeV8/5hlRfpVjoFNKObfC+XyirI1bnU00gxYIyCGcOmRTStGCEIEN48vP453efzLYjfdz98j66h3ysmBW5uH/sHfPIzxP8dLX6Q/H6Aryws4PLTmkMFXfZWdJUyY6jfVz3k1f5r8e3c+HCer55w3KEEKGxmTsctQZr9nXx9uE+PnH+/IhYQE1ZIcWe+Lnbi5sqyc8TfO/mFZQWRv6hXnVqk0qVH5DhLpvaIhBCWQWQHiEoKIkMFh/eoPLX3XL0F18THl7itAiWXQ+XfT2yGjYeup7ATXCywexzlHtH+8I7tkfm7S+6Uu2IddquE91eorJZWTiF5arnUChQvDz6PfZ+Q3ueA4QKxNedDJ94QbnE4g1YalymRPrK/4FFV6gNQSIhSLWyGMK1BCZOkBaMEGSQ96xoZn5dGd96SnVptFsEAI2Vxdy8ahaPbGjjUM8wr+w+xtBogMtPme52OJY2VeL1BTnaO8KPP3A693x4lc3VpIRgm6Oh3T0v76e2rJD3rkw9hfPOKxZx74dXsWJWddRzCxvLmV9fxt5ua4CNp0wtAprTb1M71lmxrY6k8ZSoRU1K9c8ZKI54bTGs/KCa+FbsuO6yOrW7TdQOXDPnPHXbvHLs1z4e9PkPrAlnDNmF4KRL1Gd89fvu7+87rLrLFlWoz1wzT7mGjm4BhBqG5MTegXTvc+qza3EoKo8vAqAC2Z9vDc9bqJ2vqptHYtfMpFxZDOHqYmMRpAUjBBmkID+POy45GX9QUlqYH/K72/n7CxcgJdz14l7+9vZRKooLOHd+revx/u60Zr7+3mU8968XcPXypohgcmNlEdPKCtluswja+7w8v6ODW8+enXD378bi6ZVcuMh9joAQgquWNdHSZ7ktZq6KnLhVPRtufSi8iIyHU9+nfNVr71I7wKEuVfQVi0v+L3zq9eQX/BgEa+bzxGk/pH3hreM6zpipX6IW+oOvRWYMWXjzSnik9CY1FMctIKtrCDTT5irX0JHNaodfGDUPKiwEx/epupAF7079uu2WmnYRunU/1YzFIiipVnULxiJIC0YIMsw1y5tZPL2Cs+ZNc3X3zKgu4YbTZ/LgulaeevsolyxppLDA/b+l2JPPrWfPoao0evcUiiHYis50wdrfndYc9fp0cOWp0xkKWlO+tD89E7zrTpU7/7cvwCvfU485A8V28j3h+gIbI/4Aq3d2MDyaxBhOlFvtU2/U8octSXbjTDd5eep7PbDGljEUFoLtR/r44uHz6C9sgGf+I7r4TNcQaKbNVwvnkU3u8QGAwnJkfiGjmx9RAf+TLh7fZwgJQRz30FhiBKCsAmMRpAUjBBkmP0/w+0+eyw9vie1e+NRFCwgEJf1ef2hC2FhY2lTJjqP9+AMqWPr0tnbm1qpU1kywtKmS4lLr2GkWgkM9wwzoxn15eXD9z9Ui+Ob9qrVBIheFCz9+fg8f+cU6zvr6s3zx0S1sao3f0/6PG5SPfV9nEmM7M8Xsc6Frt+prBBFCsP/YICMU8kjVh1XcZNtjke91CkHNPJX62dcWWwiEoE9U
Uti7H1lYoZoUjgdd7d61J/ZrxpI+CipgbCyCtGCEIAtUlXioKI7tA51TW8Z1K2ZQXlQQagU9FpY0VTLqD7L/2CD9Xh9r9h7jMkc9QjoRQjBt/kr2Bxs54/5ezv+f5/nQvW9EZS6lQr/Xx3/9dRvv+p8XuOjbq/nLpsOqFqOoAm55ULkumldG+ZS7B0cjKrmdDI36+dXrBzhr3jQuPaWRP25o4z0/fpWbfr6Gl3Z1RrUDGR4NqDoJYG+cnlB2fIEgr+05RuvxOPMZUkXHCTY9GM4YstCf977+s5VAPPefqjgL1G3/0cj2HvYGczGEQEpJZ0C5jEZnvSN1372TwjKVrdUVwzUUDKoakfzC1I+tLYJU5x8YojBCcILwX9ct48k7zo/KzkmFUMD4SB+rd3biC0guWzp2CyMZLrv2Vp6++EmuO3sRy2dW8/LuYzy8PsWmYxZ/23qEi7/zIve+up/rV86gsbKITz/4Fh/5xToO9QyrHeAnXoAb7ol6778/tpVrf/SKa/tvgIfWtdIz5ONzVyzmuzetYO2XLuEr1yzlYNcQt923lut+/KqqlLZ4ettRBkcDLGmqZG/nYJRQ2HnrYDf/+tBGzvjaM3zgnjf44qNbkv7MHX3emD2nANXxtKBYxUUc2U7aUjnYM8rg+V9S7pdND6onB9oBGW0RaNwyhoCDx4fo8CshaJ2WJiuvdkFsi2D/ajXJbfbZqR+3Zi4ERtIz0jPHMUJwglBSmM+saeMrUFpQX05hfh7bjvTx9LZ2assKWemoR0g3NWWFfPKCBXz5mqX8+AOns3J2NQ+8fiCpCWx2jvZ6+cffvkV9RRGPfuodfOvG0/jTp97BV65ZyvqW43z6t1YRWc2cqCK14dEAz2/voN/r548bokXIHwhyzyv7WTWnhjPmqO+jstjDx945jxfvvJBvXH8qu9oHuPORzaEF/48bDjGjuoT3nzmLgRE/7X0jUccFGBjxc9u9a3l2WzuXLp3OWXOnuTbr+9ZTO/jA3a9HPf7zl/bxiV+tj21FFBTCjFXq5/rIWo59xwYpLVRJAFvLzoOGU2Dt3SpWoEdU2i2CqpmqKWD59HCjQgdv7D/OcVRSw5sFaRolXrsgdoxgw69UYZzuipsKobkExj00XowQTCEKC/I4qaGcza29rN7RwSVL3OsRMslt585h37FBXt17LKX3PbKhjUBQ8uMPnB5KVy3Iz+Nj75zHnVcsZsPBHta3uAdtX9zVwbAvQFWJh/tfa4kSoSe2HqWte5jb3xXdi6moIJ9bzprNF69azEu7Ovn9ulY6+r28vLuT61Y2h/o9xXIP/XFDG/0jfn718bP5zk2ncdHiBjr6R6LcY6/sPsaafV1RTQN1nOKJLUdifzlzzlW3NosgGJS0HBvk4iXK4tt2tB9WfVRVDB/eEK4hqLA1xcvLV8Hb5hUxT/XGvuPsKFjMek7hzf7otOExMW2Bsmic7a2HjqtuscvfP7bGhGYuQdowQjDFWNpcqRacET+XjSPwPFauOrWJaWWF/HpN8n+cwaDkofWtnD1vGnProlMab1w1k+pSDz9/yd3P/MSWo0wrK+Tfr17C3s5BXtkTFiEpJXe9tJf59WVcsiT293Hr2XM4b0Et//X4dn66ei9BCe9dOZMFcYRASskvX2vhtFnVIfFaUK+uf29H+PXBoGR3xwBSwua2cJ2HPxBkq1X38Xg8ITjpEkCoFF2L9n4vw74AZ8+bRl15IdsO98Hym1TLj/W/iGwvYefmX8PV3415qrUtXeyefxvfnfFddrYnFxtJSKwU0s2/V8Hr0z80tuPqViHGIhg3RgimGDpOUOLJ5x0u7aUzTVFBPjefOYtnt7crv34SvLH/OAe6hrj5TPcq5NLCAm47Zw7Pbm+PWpC9vgDP7+jgsqWNXLuimbryIu5/rSX0/NPb2tl6qI/bHZXVTvLyBP/zvuVIKfnFqy0sn1nFSQ3lNFQUUV5UELGwa17Zc4y9nYN85Lzw2MuwcIQD14d6hkMjSjfaMpV2tQ/g9QVZNqOSzW29HOyK4R6afQ7cuS8iwKvjA/Pry8KzKoqrVPX01kegc7uqyC6p4WP3r+MHz+1Wb6w7OTytzsHhnmFajw9z9jw1FW93e3/KLj5XtBDYK4ylVG6hGWeMKQMMUMWD5dPHZxH43V1+uYYRgimGbjVxwcL6MRWRpYNbrUE3v30juT/QP6xvpaKogCuXxe7tf9t5cynMz+OelyN3la/sPsbAiJ8rlk2nqCCfW8+ezfM7Oth/bJBfrWnhH3+zgYWN5VyXRGX1zJpS/v0aVW17vfV6IQQL6svY42IR3P9qC3XlhVx1avi6Z08rxZMvIgRrlzX7OT9P8NbBsBDo1uSfv0L5/uNaBY7CvH1WxtD8unKWNlWy6+gAvkAQVn1MFZ5t+j1UNjNktS350fN7EmYzrd2vXDdnzZvGoukVDI0GkhbzuNTMVW0n7AHjQxtUP6KV0daAlJI1e7v45K/X89W/vB1Kh3Y/9jhSSFvXwf+bq1KScxwjBFOMZTMqmVFdwk1njmNg/DiZWVPKuxc38vt1rYz44xdv9Xl9PLH1CH+3opmSwtjCVVdexA1nzOSRDYfo7A/v4p7cepTK4gLOW6Csn1vPno0nX/DBe97gK4+9zQUL63n4H85LWhTff+YsHvj42dx6TuQuf29HZGrqwa4hnt/ZwQfOmk1RQfjYnvw85tSWsafDLgTq54sWNbCxtScUkN7U1ktlcQHvOKmW02ZVR8UJ4i2A+ztVoLixsoilzZWMBoJKfJpPVxlBgRGobGaP5ZIaDQT59tM74372N/Z3UVFcwJKmylAVvBYxgDV7u/jY/evw+pIryAtRUKjcOPaA8Vu/Um6sZTdEvPSlXZ1c88NXuOXu13ltbxe/eLWFf/jNhtjnrJ6jBKXlleiCunhICU99UYnmE3eqautM8MJ/w10XqU6uJzBGCKYYFcUeXv38u3n34uzHB+x8+Lw5HBsY5aEEqaR/3ngYry/IzasSN6f7xPnz8QWCfO/ZXfgCQUb9QZ7ZdpRLloarsRsqi7lmeTOHeob5p4tO4u7bwv2YkkEIwTtPrsOTH/7TWFBfztE+b7jADfjVmhbyhYgQDM1J9eURFsHu9n6mVxbzroV1HBsYCe2yN7f1sHxmNUIIrjm1iS2HejnQpVJVv/HkdlZ+7ZmYO/J9xwaYV1cW0XBw2+E+1VZj1UfViypnhEToymXTeWzjYba09boeD1Sg+My5qgJ+YaNyce20CcF9r+7n+R0d/OmtQ1HvPTaQwMUyzZZC2nMQtjwMS6+L6OrqDwT59INv0e/1843rT2Xdly7hq9eewrPb27nt3rX0DrvUpyy/GfyjcP/V8IMVan5CvL5Gmu1/gba1cPFXVG3KHz4cHvIT8MGOJ9Sx/vxp+NV16nbLw2rOgh1vn6r63vu8Gtpjp+8IvPq/Kni//S+Rzx3ZBE98NrlrzQJGCAwZ4Z0n1XHm3Bp+8NzumC0dvL4AD649yOLpFSyfWZXwmPPqynj/mbP47RsHueL7L/H9Z3fR5/VHuZS+dt0y/vrpd/KZyxf
FjQsky4J6tSjusxZ3ry/AQ+tbuWLZdBpd5jksaCjjYNeQctUAuzr6WTi9gtNmqoDyxtYevL4AO4/2hz73laeqRoN/3niYz/xhMz9/cR/9Xj+v7nbPvtp/bJB5VmB9Xl0ZRQV54cFEp96oJqg1LGZXez+FBXl84/pTmVZWyH8/sd21JqKj38u+Y4OcPU+5oCqKPcyoLmGXlQo7OOLnJWvi3j2v7I+IHTz8Zhtnfv1Zth6KFpmDXUP8ak0Lz3VWMnhkFy9ua4M/fES5ii64M+K1m9p66R32cecVi7jlLNUf68PnzeWHt6zkrdZu/r9frouOWZx8CXxmJ7z3LmV1PP81+OEZ8NZvYheaBXzw7P9VRXjn3QHvu0/FGR77R3j5O/D95fC7W9Sxdj7JQE8nwbf/BI98HL6zEP6zDv6zFr5aA9+cBT85G379XjWNzZ4Z9cr31LkqZyhB0N97MAB/+pTqnfWHj4SLACcQIwSGjCCE4M4rFtPZP8IvXtsf8dzQqJ97Xt7H+f/zAm8f7uMj581Nuvr5v997Knfftgop4Ser91JWmM/5J0cGxcuLClg2I7GwJMtJDVYmkCUET29rp8/r5wNnuQ+4WVBfjj8oOdA1SCAo2d0+wMKGcpY0VVJYkMem1h62HenDH5ShiXUza0pZMaua7z67i0c2tP3/7Z15XJVV/sffX/Z9uQKi7CjiLqKCSppbqWVZZpvaNG1TTYvOr0ZraqapV1Mzr+lXTTPtNf0qm2yybN/dzXIjNUwRBNwRVAQBBYHz++N57oUri4hcbt173q8XL+7zcO59zuHA83nO93wX5k1KITzAm/UtuMzW1jWw92g1yaYQeHl6kBod3FiYyDcY5m6GUXex89BxekUGERbgw90TevNdwRGW/LC/mdnJuj+Q2SThYZ/uQTbPoeW5JdTUNXBtRjz5JZWs2Gk8/ZZXnzLFBT7Zam/ayi+pZNJTK/nTh9vYeqIbgZwg9Kt5RvbY6f+yj3TGMAt5iPEQ0ZRpg3vyl8sGsaGojA82N1+N4BMIQ66G6z+Gm7424iU+/C38cyi8eL5xg351shFjcbLC8Ko6ugsmPWwkSkwYBRMehO0fGdHZkX3g2kVw/3723/Ijgw/+gSuCFlJ30zK44BEjg23WXKNK2wWPwBWvGmJSfQQ+mWfGcRww9h7SZsHYe41VQZFZ5OiHN40a3wMuh/yv4eN5LZu1qo7A1v/CibZToXQGHQ9j1WjOwIhECxP7RvHCil3MzjCS5a3ILeHed7dyuLKG0b268c9rhzKylWyrLSEiXNC/O+NSI3l34z6C/LwcvikebwnE00Nsdv/Fm/YRE+bfar+tuZ3yS6rw8vCgpq6BPt2D8fHyYEDPEDbvPUbPMH8A2yoB4IphsWzZd4xHLxvInJEJbDtQwYYWhGDP0SoaFCRHNuaQ6t8jhC+3FaOUMkTVzxDCvEOVDE80guhmZSbw5ve7+Z//buGhD7eRmdyNmDA/yqpPkbO/nAAfTwb0bDTV9IkO5tv8I9TVN/D5j8VEBPnw0CX9WZFbwsurCpnQtztPfp3LsepakiMD+SLnIAumpNpE3Rob8uW8saRW+sLCV0g79jVk3Ar9pzcb16q8UgbHhhEW0DzdxMxhsby1bjd//XwHkwdEE+jbyq0rLsMQg5zFhikHZaw+Kg7AZ/caKwHxgMQx0Gdy4/uy5hkV7Hqk2aXn/s+KHTQo+GF/Ja8VxnHL2LktXxcMk9c3f4YtiwyxU/Uw9vdG8N7yx2DtM0Zti2WPGrUuZr4GEamw8q+GeMdlQE2F0dddy4zsryi48nUYcFnr1+0EtBBoHMq9k1O56JnV/Gt5Ht6eHjy3Yhd9o4N5YU46wxM7nqLa29ODWZntKDnZCfh4eZBgCWBXSRXF5SdZk1fKneN7t2p2st6gd5VWYm2SYtrc0+LCeHv9HqJD/YkK9iU6tNG0NCcznosGRtMtyAiuyki08PVPhyipOGlXUtTqOprUJOaif88QFm3YS3HFSXqEGiJz/OQp9h87wazu8bZxLLkji1U7S/k2/whrdx1mfeERwgN9CPP35ophve32RlK7B1Nb38CO4uMszy3h8qEx+Hl7ckNWIo99toP/btzLm9/vZs7IBFKjg3lgSQ47D1WSGh1MQ4Pio80HGJMSQWp0MJQZLqRbG5JIHPsnTq/3Vl59ii17j3HnhJQWf6ceHsJDlw5gxnNreXZ5PvOn9G2xndnYiKkYfJX9+X2bqP3uRU7kLuV1r19T/ul2LIE+zM6MN8QnbZZd85q6et7ZsJdJ/aIA4Ymvcrmgf3cSIwI5XFnD3z7fQVp8GLMzzX2i0XfDzq8M2399DaTNbqyklnmrIQAf3G6UcJ31jrGfM+4+o17DuueNLwDESLM+7j5IudAQJwejhUDjUPr1CGH6kJ68vNowD12bEcdDlwxwmmtrR+kVZWwAv5e9jwZlPL23RpCvF9EhfuwqqbTZ41NML5y0uDBe+7aIr7YVMybFPsGgiNhEAAw3ToD1RUeZNrgxMMyabK5p8J11w3j7wQqbEOSZK5imdTBC/LyZNrin3ee1hvV9L68uoLq23rYXc01GPM8szWf+4q10C/ThngtSqamv58EPcvgip5jU6GA2FB1l/7ETzJ9iRkOHJZCX8Si3rArj7wdOMLaPfUbcNfmHaVAwNqX12Jf0+HBmDI3hldWFXDMinvhuradkKSitZNGGvcyfnIqXVdxih/Fu/AM8sGk6kbt9qd65h6raej7ecoCFN2cSEWQf3fxFTjGHK2u5blQifaODmfTkSha8t5UbshJ5YEkOR6pq2bS7rFEIPDzh8hfg+SzD1DP23sYPG34TrH4KdnwCQ65tTKMuApc8Axm/MTKw+oYYiQVbqhXhQPQegcbh3HNhKsMSwnnq6iE8PmPwL04EwLD7Fx2p4t2Ne8lIspDQre1/1N6mcOw8VElMmD9BpiljqFmutKaugbS4tvcxBvQMIcDH02a/t1JQWkVEkA+h/o3eUNayohuLGou555keP1YPoLOld1QQIvDxlgOEB3iTmWwIU4ifty34b8GUvoQGeBMV7MfwhHC+2FYMwAeb9xPg48kF1qSHIkRPuI1SsZC9p3nB+VU7Swn282qxGl5TFkzti5en8KePcqhvI9jtsc928NKqAjYU2V9rTd5hokP8WP+HiWx7ZApv3pRB0ZEqrnrxO7ukgwBvfrebxG4BjOkdQfcQPx68uB/rCo9y28JseoT5cc2IOAoOV9m5MxOeALPfNfYMwpqsWAMsMOJG8Ak2PJWaImIEC0b1M4L9ulgEQAuBpguIswTw3u2juXyo82IbzpVekYGcqlcUHalmZhurgabtd5VWsfPQcbsbcZzFH0ugYQMfHNv2Tc/L04P0+PBmQlB4uIrkCPube5CvF5lJFr4w9wkAcosr8fP2IC68Y8kM/bw9SewWSIOCC/tH25mN7p6YwhNXDrH7XUweEM32gxXkHTrOp1sPMnlAtF023WA/b/p0D2bTbvubs1KKVXmlZPWKaHx6b4XuIX4smNKXFbmlPLDkxxY9oHKLj/PNdsPNc3luo0
tnfYNi7a4jnJcSYdvHGJMSyRs3ZlJSUcOVL67lB1OkfjpQwcbdZcwZmWAzAV41PI5fj07kd5P68P7tWVxlimGzfZyEUdD/0uadn/hnmLuledqPnwFaCDSadmBNHeHv7WkXSdwavaOCqKypI/fQcTvTjIjYnnrb4zKbkWQh99BxOx/6giauo02ZOjCagtIq26Z2XslxUqKCz8mF1ipiUwbZ19EO9fdm5rBYu8+21tr+w5IfqThZ12I097CEcDbvOWbnBppfUsnB8pPtrsVx/ehE7hzfm0Ub9vLop83dYZ9fkU+AjydDYkNZtqNRCH7cb7innu5llpFk4a2bMzlRW8/lz63lulfX8fcvd+Dr5WEndCLCny8dwNxJKfh4eTCwZyh+3h7NhLpVPL0gsP2OEV2J04RARDxF5AcR+cRZfdBo2os1lmDqoGibmac97ZVq3B+wcm1GPNePSmjRO+Z0RiRaUAo27TZuNhUnT3G4soakyOZCMHlANCJGtDUYT8YpHTQLWRmZ3I2YMH+yep05b1WcJYCBMSFsKCojIsiXrF7Nb3rp8eEcr6mz7V8ArDTjE8b2aX9urHsu7MOvRyfy6ppCnvgq1yYGe49W8/HWg8zKiGd6Wgz5JZW2HE5r8ozrtJSDa0hcGCt/P577p/blpwMVLM8tZXpazzbnyMfLWLG15Nl1Lhws74S0HmeJM1cEc4HtTry+RtNuQv29eWFOOve15a3ShF5NyoOmniYEF/TvzsPTB7brc4bGh+HtKawvLEMpxbPLjejc1OjgZm2jQvwYFh/O5znFlFefouR4TbNrny03ZCWxev74Vuton84Uc1VwyZAeLZp5rPUgmpqHVucdJjkykNizMGGJCH+a1p+rh8fx7PJd3PLGJo5V1/Liql14CNw8JpmJ/YyaC8t2HLJdp1+PkGabwlYCfb249fxerF4wnievGsKCdsz1iEQLPx2sOKeqfE35aMsBRj2+jIXfd21GVad4DYlILHAx8BfgIGM9KAAACpNJREFUf5zRB43mbJnSRlK804kK9iXY14vK2rpzqhnt5+3JoJhQ1hce4bHPtvPy6kJmZ8ZzfkrLZpQpA6N59NPtfG3ayPucoxAAZ2Vamp4WwydbD9oSD55OQrcALIE+ZO8pY1ZmPBuLjrIm/zA3ZiV2qF9/vWIQqdHBPP75di5+Zg2llTXMHBZrc8tNjgxk6Y4SrhweR/aeMm7MSjrDpxrZbmekt28/KzPJumIrY3yqITz/WpbHW+v22NqEBfgwMtnC6F4RjEy2tFq21poyHeDhj7fRv2cI6Q4uLGXFWSuCp4H5QKtZtUTkNyKyUUQ2lpaWdl3PNJpOQERIjgoiLjygzWR67SEjqRvZe47x8upCrh+VwKOXDWz15jxloPFEbl059Glh5eBI4iwBfDFvLL2jWr6uiJAeH0727jKOVtVy539+IDbcn7smthw/cCZEhBvPS2LxbaMRMTaEbx3by/bziX2jWFdwlOW5JZyqV5zXhntqRxgaH46Xh9j2CfaVVfOPpXlEhfgxJiWCMSkRdAv04e31e7jljY1MenJlq6uHjbvLyNlfwe8npxId6sdvF2bbeyQ5kC5fEYjINKBEKbVJRMa11k4p9RLwEsDw4cM7ISm6RtO1/G5Sytln6myBsSkRvLByFzdmJfHHaf3aTMcRGx7AoJhQftxfTpCvFz1Dm+dCcjbDEsL5Zvshblu4iaNVtbz/29FnlRiwJYbEhfH53DEcLD9pF18xvm8UL68u5MmvduLj5cGIcwhibAl/H08GxYaywRSCp7/JQ0R4YU66LZ4DjOC0ZdtLuP2tbN5YW9Ri4Ny/1xQS6u/NjVlJjEuNZMZza7nr7WwW3pR5Rm+qc8UZK4Is4FIRKQIWARNEZKET+qHROJRxqVFnZU5qjdG9I1g9f/wZRcCKdVVgxAF0banS9pAeb3hNrS88yh+n9eu0vFBW99SmjEi0EOzrRcHhKjISLQ6JYclItLB1Xzk5+8t5P3sfvxqZYCcCYBRsmjqoBxP7RvHKmkK7TLZgbHJ/ua2YWZnx+Pt4MqBnKI/PGMT3BUdtm/+OpMuFQCl1v1IqVimVCFwDLFNKzenqfmg0vyTiLAHtvqlPNYXgXDeKHcXg2DACfTyZNrgHc1pI492ZeHt62NxSO9ssZCUjyUJtfQN3/Ccbf29Pbh/Xq9W2d01M4Vj1qWalXN/4rggR4VejGn8fM9JjWXzbKKYNPveHiTOh4wg0GhcjOTKIBVP6Ovwm21H8fTxZes84nr46rUtWLJMHGm6141LbF6dwtgxPsCACu49Uc/OYZLs0IaeTFhfG+X0ieWV1AdW1xqqgqqaORRv2ctGgHs1WEsMTLV3yO3JqriGl1ApghTP7oNG4Im09lf4ciO7CvYtLBvdgcEyo3d5BZxIa4E1q92CKK05y85gzeyXdPTGFK55fy0urCgjy9eK1b4s4frKuQ55TnYVOOqfRaFwaEXGYCFj5+8whnGpoaNU1tCnDEsI5r3cET3+TBxh7DI9ePpChXeQq2hJaCDQajeYcGdSOdCFNeeiS/iz8fjcz0mNtxYmciRYCjUaj6WJSuge3O7q8K9CbxRqNRuPmaCHQaDQaN0cLgUaj0bg5Wgg0Go3GzdFCoNFoNG6OFgKNRqNxc7QQaDQajZujhUCj0WjcHDm98PPPEREpBTpauy0CONyJ3fml4I7jdscxg3uO2x3HDGc/7gSl1Bmz7f0ihOBcEJGNSqnhzu5HV+OO43bHMYN7jtsdxwyOG7c2DWk0Go2bo4VAo9Fo3Bx3EIKXnN0BJ+GO43bHMYN7jtsdxwwOGrfL7xFoNBqNpm3cYUWg0Wg0mjbQQqDRaDRujksLgYhMEZFcEckXkfuc3R9HICJxIrJcRLaLyDYRmWuet4jI1yKSZ353Xh08ByEiniLyg4h8Yh4nicg6c8zviIiPs/vY2YhImIgsFpEd5pyPcvW5FpHfmX/bOSLytoj4ueJci8i/RaRERHKanGtxbsXgGfPetlVE0s/l2i4rBCLiCTwLTAX6A9eKSH/n9soh1AH3KKX6ASOBO8xx3gcsVUqlAEvNY1djLrC9yfHfgKfMMZcBNzmlV47lH8AXSqm+wBCM8bvsXItIDHA3MFwpNRDwBK7BNef6/4App51rbW6nAinm12+A58/lwi4rBEAGkK+UKlBK1QKLgOlO7lOno5Q6qJTKNl8fx7gxxGCM9XWz2evAZc7poWMQkVjgYuAV81iACcBis4krjjkEGAu8CqCUqlVKHcPF5xqjpK6/iHgBAcBBXHCulVKrgKOnnW5tbqcDbyiD74EwEenR0Wu7shDEAHubHO8zz7ksIpIIDAXWAd2VUgfBEAsgynk9cwhPA/OBBvO4G3BMKVVnHrvifCcDpcBrpknsFREJxIXnWim1H3gC2IMhAOXAJlx/rq20Nreden9zZSGQFs65rK+siAQB7wHzlFIVzu6PIxGRaUCJUmpT09MtNHW1+fYC0oHnlVJDgSpcyAzUEqZNfDqQBPQEAjHMIqfjanN9Jjr1792VhWAfENfkOBY44KS+OBQR8
cYQgbeUUu+bpw9Zl4rm9xJn9c8BZAGXikgRhslvAsYKIcw0H4Brzvc+YJ9Sap15vBhDGFx5ricBhUqpUqXUKeB9YDSuP9dWWpvbTr2/ubIQbABSTO8CH4wNpo+c3KdOx7SNvwpsV0o92eRHHwHXm6+vBz7s6r45CqXU/UqpWKVUIsa8LlNKzQaWAzPNZi41ZgClVDGwV0RSzVMTgZ9w4bnGMAmNFJEA82/dOmaXnusmtDa3HwG/Mr2HRgLlVhNSh1BKuewXcBGwE9gFPODs/jhojOdhLAm3ApvNr4swbOZLgTzzu8XZfXXQ+McBn5ivk4H1QD7wLuDr7P45YLxpwEZzvj8Awl19roGHgR1ADvAm4OuKcw28jbEPcgrjif+m1uYWwzT0rHlv+xHDq6rD19YpJjQajcbNcWXTkEaj0WjagRYCjUajcXO0EGg0Go2bo4VAo9Fo3BwtBBqNRuPmaCHQaByMiIyzZkjVaH6OaCHQaDQaN0cLgUZjIiJzRGS9iGwWkRfNegeVIvK/IpItIktFJNJsmyYi35u54Jc0yRPfW0S+EZEt5nt6mR8f1KSOwFtmlKxG87NAC4FGA4hIP+BqIEsplQbUA7MxkpxlK6XSgZXAQ+Zb3gAWKKUGY0R2Ws+/BTyrlBqCkRPHGvY/FJiHURsjGSNfkkbzs8DrzE00GrdgIjAM2GA+rPtjJPhqAN4x2ywE3heRUCBMKbXSPP868K6IBAMxSqklAEqpkwDm561XSu0zjzcDicAaxw9LozkzWgg0GgMBXldK3W93UuSPp7VrKydLW+aemiav69H/e5qfEdo0pNEYLAVmikgU2GrFJmD8j1izXM4C1iilyoEyERljnr8OWKmMOhD7ROQy8zN8RSSgS0eh0XQA/VSi0QBKqZ9E5EHgKxHxwMgAeQdG8ZcBIrIJozrW1eZbrgdeMG/0BcAN5vnrgBdF5BHzM67swmFoNB1CZx/VaNpARCqVUkHO7odG40i0aUij0WjcHL0i0Gg0GjdHrwg0Go3GzdFCoNFoNG6OFgKNRqNxc7QQaDQajZujhUCj0WjcnP8H7qcEV4sozUEAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "experimento_ssd7_panel_cell.h5\n" + ] + } + ], + "source": [ + "#Graficar aprendizaje\n", + "\n", + "history_path =config['train']['saved_weights_name'].split('.')[0] + '_history'\n", + "\n", + "hist_load = np.load(history_path + '.npy',allow_pickle=True).item()\n", + "\n", + "print(hist_load.keys())\n", + "\n", + "# summarize history for loss\n", + "plt.plot(hist_load['loss'])\n", + "plt.plot(hist_load['val_loss'])\n", + "plt.title('model loss')\n", + "plt.ylabel('loss')\n", + "plt.xlabel('epoch')\n", + "plt.legend(['train', 'test'], loc='upper left')\n", + "plt.show()\n", + "\n", + "print(config['train']['saved_weights_name'])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Evaluación del Modelo" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Processing image set 'train.txt': 100%|██████████| 1/1 [00:00<00:00, 20.74it/s]\n", + "Processing image set 'test.txt': 100%|██████████| 1/1 [00:00<00:00, 25.40it/s]\n", + "Number of images in the evaluation dataset: 1\n", + "\n", + "Producing predictions batch-wise: 100%|██████████| 1/1 [00:00<00:00, 1.50it/s]\n", + "Matching predictions to ground truth, class 1/1.: 100%|██████████| 200/200 [00:00<00:00, 7283.80it/s]\n", + "Computing precisions and recalls, class 1/1\n", + "Computing average precision, class 1/1\n", + "200 instances of class panel with average precision: 0.8982\n", + "mAP using the weighted average of precisions among classes: 0.8982\n", + "mAP: 0.8982\n", + "panel AP 0.898\n", + "\n", + " mAP 0.898\n" + ] + } + ], + "source": [ + "\n", + "config_path = 'config_7_panel.json'\n", + "\n", + "with open(config_path) as config_buffer:\n", + " config = json.loads(config_buffer.read())\n", + "\n", + " \n", + "model_mode = 'training'\n", + "# TODO: Set the path to the `.h5` file of the model to be loaded.\n", + "model_path = config['train']['saved_weights_name']\n", + "\n", + "# We need to create an SSDLoss object in order to pass that to the model loader.\n", + "ssd_loss = SSDLoss(neg_pos_ratio=3, alpha=1.0)\n", + "\n", + "K.clear_session() # Clear previous models from memory.\n", + "\n", + "model = load_model(model_path, custom_objects={'AnchorBoxes': AnchorBoxes,\n", + " 'L2Normalization': L2Normalization,\n", + " 'DecodeDetections': DecodeDetections,\n", + " 'compute_loss': ssd_loss.compute_loss})\n", + "\n", + "\n", + " \n", + "train_dataset = DataGenerator(load_images_into_memory=False, hdf5_dataset_path=None)\n", + "val_dataset = DataGenerator(load_images_into_memory=False, hdf5_dataset_path=None)\n", + "\n", + "# 2: Parse the image and label lists for the training and validation datasets. 
This can take a while.\n", + "\n", + "\n", + "\n", + "# The XML parser needs to now what object class names to look for and in which order to map them to integers.\n", + "classes = ['background' ] + labels\n", + "\n", + "train_dataset.parse_xml(images_dirs= [config['train']['train_image_folder']],\n", + " image_set_filenames=[config['train']['train_image_set_filename']],\n", + " annotations_dirs=[config['train']['train_annot_folder']],\n", + " classes=classes,\n", + " include_classes='all',\n", + " #classes = ['background', 'panel', 'cell'], \n", + " #include_classes=classes,\n", + " exclude_truncated=False,\n", + " exclude_difficult=False,\n", + " ret=False)\n", + "\n", + "val_dataset.parse_xml(images_dirs= [config['test']['test_image_folder']],\n", + " image_set_filenames=[config['test']['test_image_set_filename']],\n", + " annotations_dirs=[config['test']['test_annot_folder']],\n", + " classes=classes,\n", + " include_classes='all',\n", + " #classes = ['background', 'panel', 'cell'], \n", + " #include_classes=classes,\n", + " exclude_truncated=False,\n", + " exclude_difficult=False,\n", + " ret=False)\n", + "\n", + "#########################\n", + "# 3: Set the batch size.\n", + "#########################\n", + "batch_size = config['train']['batch_size'] # Change the batch size if you like, or if you run into GPU memory issues.\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "evaluator = Evaluator(model=model,\n", + " n_classes=n_classes,\n", + " data_generator=val_dataset,\n", + " model_mode='training')\n", + "\n", + "results = evaluator(img_height=img_height,\n", + " img_width=img_width,\n", + " batch_size=4,\n", + " data_generator_mode='resize',\n", + " round_confidences=False,\n", + " matching_iou_threshold=0.5,\n", + " border_pixels='include',\n", + " sorting_algorithm='quicksort',\n", + " average_precision_mode='sample',\n", + " num_recall_points=11,\n", + " ignore_neutral_boxes=True,\n", + " return_precisions=True,\n", + " return_recalls=True,\n", + " return_average_precisions=True,\n", + " verbose=True)\n", + "\n", + "mean_average_precision, average_precisions, precisions, recalls = results\n", + "total_instances = []\n", + "precisions = []\n", + "\n", + "for i in range(1, len(average_precisions)):\n", + " \n", + " print('{:.0f} instances of class'.format(len(recalls[i])),\n", + " classes[i], 'with average precision: {:.4f}'.format(average_precisions[i]))\n", + " total_instances.append(len(recalls[i]))\n", + " precisions.append(average_precisions[i])\n", + "\n", + "if sum(total_instances) == 0:\n", + " \n", + " print('No test instances found.')\n", + "\n", + "else:\n", + "\n", + " print('mAP using the weighted average of precisions among classes: {:.4f}'.format(sum([a * b for a, b in zip(total_instances, precisions)]) / sum(total_instances)))\n", + " print('mAP: {:.4f}'.format(sum(precisions) / sum(x > 0 for x in total_instances)))\n", + "\n", + " for i in range(1, len(average_precisions)):\n", + " print(\"{:<14}{:<6}{}\".format(classes[i], 'AP', round(average_precisions[i], 3)))\n", + " print()\n", + " print(\"{:<14}{:<6}{}\".format('','mAP', round(mean_average_precision, 3)))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Cargar nuevamente el modelo desde los pesos.\n", + "Predicción" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Training on: \t{'panel': 1}\n", + "\n" + ] + } + ], + "source": [ + "from imageio 
import imread\n", + "from keras.preprocessing import image\n", + "import time\n", + "\n", + "config_path = 'config_7_panel.json'\n", + "input_path = ['panel_jpg/Mision_1/', 'panel_jpg/Mision_2/']\n", + "output_path = 'result_ssd7_panel_cell/'\n", + "\n", + "with open(config_path) as config_buffer:\n", + " config = json.loads(config_buffer.read())\n", + "\n", + "makedirs(output_path)\n", + "###############################\n", + "# Parse the annotations\n", + "###############################\n", + "score_threshold = 0.8\n", + "score_threshold_iou = 0.3\n", + "labels = config['model']['labels']\n", + "categories = {}\n", + "#categories = {\"Razor\": 1, \"Gun\": 2, \"Knife\": 3, \"Shuriken\": 4} #la categoría 0 es la background\n", + "for i in range(len(labels)): categories[labels[i]] = i+1\n", + "print('\\nTraining on: \\t' + str(categories) + '\\n')\n", + "\n", + "img_height = config['model']['input'] # Height of the model input images\n", + "img_width = config['model']['input'] # Width of the model input images\n", + "img_channels = 3 # Number of color channels of the model input images\n", + "n_classes = len(labels) # Number of positive classes, e.g. 20 for Pascal VOC, 80 for MS COCO\n", + "classes = ['background'] + labels\n", + "\n", + "model_mode = 'training'\n", + "# TODO: Set the path to the `.h5` file of the model to be loaded.\n", + "model_path = config['train']['saved_weights_name']\n", + "\n", + "# We need to create an SSDLoss object in order to pass that to the model loader.\n", + "ssd_loss = SSDLoss(neg_pos_ratio=3, alpha=1.0)\n", + "\n", + "K.clear_session() # Clear previous models from memory.\n", + "\n", + "model = load_model(model_path, custom_objects={'AnchorBoxes': AnchorBoxes,\n", + " 'L2Normalization': L2Normalization,\n", + " 'DecodeDetections': DecodeDetections,\n", + " 'compute_loss': ssd_loss.compute_loss})\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "image_paths = []\n", + "for inp in input_path:\n", + " if os.path.isdir(inp):\n", + " for inp_file in os.listdir(inp):\n", + " image_paths += [inp + inp_file]\n", + " else:\n", + " image_paths += [inp]\n", + "\n", + "image_paths = [inp_file for inp_file in image_paths if (inp_file[-4:] in ['.jpg', '.png', 'JPEG'])]\n", + "times = []\n", + "\n", + "\n", + "for img_path in image_paths:\n", + " orig_images = [] # Store the images here.\n", + " input_images = [] # Store resized versions of the images here.\n", + " #print(img_path)\n", + "\n", + " # preprocess image for network\n", + " orig_images.append(imread(img_path))\n", + " img = image.load_img(img_path, target_size=(img_height, img_width))\n", + " img = image.img_to_array(img)\n", + " input_images.append(img)\n", + " input_images = np.array(input_images)\n", + " # process image\n", + " start = time.time()\n", + " y_pred = model.predict(input_images)\n", + " y_pred_decoded = decode_detections(y_pred,\n", + " confidence_thresh=score_threshold,\n", + " iou_threshold=score_threshold_iou,\n", + " top_k=200,\n", + " normalize_coords=True,\n", + " img_height=img_height,\n", + " img_width=img_width)\n", + "\n", + "\n", + " #print(\"processing time: \", time.time() - start)\n", + " times.append(time.time() - start)\n", + " # correct for image scale\n", + "\n", + " # visualize detections\n", + " # Set the colors for the bounding boxes\n", + " colors = plt.cm.brg(np.linspace(0, 1, 21)).tolist()\n", + "\n", + " plt.figure(figsize=(20,12))\n", + " 
plt.imshow(orig_images[0],cmap = 'gray')\n", + "\n", + " current_axis = plt.gca()\n", + " #print(y_pred)\n", + " for box in y_pred_decoded[0]:\n", + " # Transform the predicted bounding boxes for the 300x300 image to the original image dimensions.\n", + "\n", + " xmin = box[2] * orig_images[0].shape[1] / img_width\n", + " ymin = box[3] * orig_images[0].shape[0] / img_height\n", + " xmax = box[4] * orig_images[0].shape[1] / img_width\n", + " ymax = box[5] * orig_images[0].shape[0] / img_height\n", + "\n", + " color = colors[int(box[0])]\n", + " label = '{}: {:.2f}'.format(classes[int(box[0])], box[1])\n", + " current_axis.add_patch(plt.Rectangle((xmin, ymin), xmax-xmin, ymax-ymin, color=color, fill=False, linewidth=2))\n", + " current_axis.text(xmin, ymin, label, size='x-large', color='white', bbox={'facecolor':color, 'alpha':1.0})\n", + "\n", + " #plt.figure(figsize=(15, 15))\n", + " #plt.axis('off')\n", + " save_path = output_path + img_path.split('/')[-1]\n", + " plt.savefig(save_path)\n", + " plt.close()\n", + " \n", + "file = open(output_path + 'time.txt','w')\n", + "\n", + "file.write('Tiempo promedio:' + str(np.mean(times)))\n", + "\n", + "file.close()\n", + "print('Tiempo Total: {:.3f}'.format(np.sum(times)))\n", + "print('Tiempo promedio por imagen: {:.3f}'.format(np.mean(times)))\n", + "print('OK')" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "panel : 69\n", + "cell : 423\n" + ] + } + ], + "source": [ + "\n", + "# Summary instance training\n", + "category_train_list = []\n", + "for image_label in train_dataset.labels:\n", + " category_train_list += [i[0] for i in train_dataset.labels[0]]\n", + "summary_category_training = {train_dataset.classes[i]: category_train_list.count(i) for i in list(set(category_train_list))}\n", + "for i in summary_category_training.keys():\n", + " print(i, ': {:.0f}'.format(summary_category_training[i]))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "__________________________________________________________________________________________________\n", + "Layer (type) Output Shape Param # Connected to \n", + "==================================================================================================\n", + "input_1 (InputLayer) (None, 400, 400, 3) 0 \n", + "__________________________________________________________________________________________________\n", + "identity_layer (Lambda) (None, 400, 400, 3) 0 input_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv1 (Conv2D) (None, 400, 400, 32) 2432 identity_layer[0][0] \n", + "__________________________________________________________________________________________________\n", + "bn1 (BatchNormalization) (None, 400, 400, 32) 128 conv1[0][0] \n", + "__________________________________________________________________________________________________\n", + "elu1 (ELU) (None, 400, 400, 32) 0 bn1[0][0] \n", + "__________________________________________________________________________________________________\n", + "pool1 (MaxPooling2D) (None, 200, 200, 32) 0 elu1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2 (Conv2D) (None, 200, 200, 48) 13872 pool1[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "bn2 (BatchNormalization) (None, 200, 200, 48) 192 conv2[0][0] \n", + "__________________________________________________________________________________________________\n", + "elu2 (ELU) (None, 200, 200, 48) 0 bn2[0][0] \n", + "__________________________________________________________________________________________________\n", + "pool2 (MaxPooling2D) (None, 100, 100, 48) 0 elu2[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv3 (Conv2D) (None, 100, 100, 64) 27712 pool2[0][0] \n", + "__________________________________________________________________________________________________\n", + "bn3 (BatchNormalization) (None, 100, 100, 64) 256 conv3[0][0] \n", + "__________________________________________________________________________________________________\n", + "elu3 (ELU) (None, 100, 100, 64) 0 bn3[0][0] \n", + "__________________________________________________________________________________________________\n", + "pool3 (MaxPooling2D) (None, 50, 50, 64) 0 elu3[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv4 (Conv2D) (None, 50, 50, 64) 36928 pool3[0][0] \n", + "__________________________________________________________________________________________________\n", + "bn4 (BatchNormalization) (None, 50, 50, 64) 256 conv4[0][0] \n", + "__________________________________________________________________________________________________\n", + "elu4 (ELU) (None, 50, 50, 64) 0 bn4[0][0] \n", + "__________________________________________________________________________________________________\n", + "pool4 (MaxPooling2D) (None, 25, 25, 64) 0 elu4[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv5 (Conv2D) (None, 25, 25, 48) 27696 pool4[0][0] \n", + "__________________________________________________________________________________________________\n", + "bn5 (BatchNormalization) (None, 25, 25, 48) 192 conv5[0][0] \n", + "__________________________________________________________________________________________________\n", + "elu5 (ELU) (None, 25, 25, 48) 0 bn5[0][0] \n", + "__________________________________________________________________________________________________\n", + "pool5 (MaxPooling2D) (None, 12, 12, 48) 0 elu5[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv6 (Conv2D) (None, 12, 12, 48) 20784 pool5[0][0] \n", + "__________________________________________________________________________________________________\n", + "bn6 (BatchNormalization) (None, 12, 12, 48) 192 conv6[0][0] \n", + "__________________________________________________________________________________________________\n", + "elu6 (ELU) (None, 12, 12, 48) 0 bn6[0][0] \n", + "__________________________________________________________________________________________________\n", + "pool6 (MaxPooling2D) (None, 6, 6, 48) 0 elu6[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv7 (Conv2D) (None, 6, 6, 32) 13856 pool6[0][0] \n", + "__________________________________________________________________________________________________\n", + "bn7 (BatchNormalization) (None, 6, 6, 32) 128 conv7[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "elu7 (ELU) (None, 6, 6, 32) 0 bn7[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes4 (Conv2D) (None, 50, 50, 12) 6924 elu4[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes5 (Conv2D) (None, 25, 25, 12) 5196 elu5[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes6 (Conv2D) (None, 12, 12, 12) 5196 elu6[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes7 (Conv2D) (None, 6, 6, 12) 3468 elu7[0][0] \n", + "__________________________________________________________________________________________________\n", + "boxes4 (Conv2D) (None, 50, 50, 16) 9232 elu4[0][0] \n", + "__________________________________________________________________________________________________\n", + "boxes5 (Conv2D) (None, 25, 25, 16) 6928 elu5[0][0] \n", + "__________________________________________________________________________________________________\n", + "boxes6 (Conv2D) (None, 12, 12, 16) 6928 elu6[0][0] \n", + "__________________________________________________________________________________________________\n", + "boxes7 (Conv2D) (None, 6, 6, 16) 4624 elu7[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes4_reshape (Reshape) (None, 10000, 3) 0 classes4[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes5_reshape (Reshape) (None, 2500, 3) 0 classes5[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes6_reshape (Reshape) (None, 576, 3) 0 classes6[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes7_reshape (Reshape) (None, 144, 3) 0 classes7[0][0] \n", + "__________________________________________________________________________________________________\n", + "anchors4 (AnchorBoxes) (None, 50, 50, 4, 8) 0 boxes4[0][0] \n", + "__________________________________________________________________________________________________\n", + "anchors5 (AnchorBoxes) (None, 25, 25, 4, 8) 0 boxes5[0][0] \n", + "__________________________________________________________________________________________________\n", + "anchors6 (AnchorBoxes) (None, 12, 12, 4, 8) 0 boxes6[0][0] \n", + "__________________________________________________________________________________________________\n", + "anchors7 (AnchorBoxes) (None, 6, 6, 4, 8) 0 boxes7[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes_concat (Concatenate) (None, 13220, 3) 0 classes4_reshape[0][0] \n", + " classes5_reshape[0][0] \n", + " classes6_reshape[0][0] \n", + " classes7_reshape[0][0] \n", + "__________________________________________________________________________________________________\n", + "boxes4_reshape (Reshape) (None, 10000, 4) 0 boxes4[0][0] \n", + "__________________________________________________________________________________________________\n", + "boxes5_reshape (Reshape) (None, 2500, 4) 0 boxes5[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "boxes6_reshape (Reshape) (None, 576, 4) 0 boxes6[0][0] \n", + "__________________________________________________________________________________________________\n", + "boxes7_reshape (Reshape) (None, 144, 4) 0 boxes7[0][0] \n", + "__________________________________________________________________________________________________\n", + "anchors4_reshape (Reshape) (None, 10000, 8) 0 anchors4[0][0] \n", + "__________________________________________________________________________________________________\n", + "anchors5_reshape (Reshape) (None, 2500, 8) 0 anchors5[0][0] \n", + "__________________________________________________________________________________________________\n", + "anchors6_reshape (Reshape) (None, 576, 8) 0 anchors6[0][0] \n", + "__________________________________________________________________________________________________\n", + "anchors7_reshape (Reshape) (None, 144, 8) 0 anchors7[0][0] \n", + "__________________________________________________________________________________________________\n", + "classes_softmax (Activation) (None, 13220, 3) 0 classes_concat[0][0] \n", + "__________________________________________________________________________________________________\n", + "boxes_concat (Concatenate) (None, 13220, 4) 0 boxes4_reshape[0][0] \n", + " boxes5_reshape[0][0] \n", + " boxes6_reshape[0][0] \n", + " boxes7_reshape[0][0] \n", + "__________________________________________________________________________________________________\n", + "anchors_concat (Concatenate) (None, 13220, 8) 0 anchors4_reshape[0][0] \n", + " anchors5_reshape[0][0] \n", + " anchors6_reshape[0][0] \n", + " anchors7_reshape[0][0] \n", + "__________________________________________________________________________________________________\n", + "predictions (Concatenate) (None, 13220, 15) 0 classes_softmax[0][0] \n", + " boxes_concat[0][0] \n", + " anchors_concat[0][0] \n", + "==================================================================================================\n", + "Total params: 193,120\n", + "Trainable params: 192,448\n", + "Non-trainable params: 672\n", + "__________________________________________________________________________________________________\n" + ] + } + ], + "source": [ + "\n", + "\n", + "\n", + "model.summary()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.8" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/README.md b/README.md new file mode 100644 index 0000000..e424002 --- /dev/null +++ b/README.md @@ -0,0 +1,2 @@ +# Rentadrone_MachineLearning +Photovoltaic fault detector diff --git a/Technical Document (Milestone 2).pdf b/Technical Document (Milestone 2).pdf new file mode 100644 
index 0000000..a965c94 Binary files /dev/null and b/Technical Document (Milestone 2).pdf differ diff --git a/config_7_panel.json b/config_7_panel.json new file mode 100644 index 0000000..9265f16 --- /dev/null +++ b/config_7_panel.json @@ -0,0 +1,28 @@ +{ + "model" : { + "backend": "ssd7", + "input": 400, + "labels": ["panel"] + }, + + "train": { + "train_image_folder": "Train&Test_A/images", + "train_annot_folder": "Train&Test_A/anns", + "train_image_set_filename": "Train&Test_A/train.txt", + + "train_times": 1, + "batch_size": 8, + "learning_rate": 1e-4, + "nb_epochs": 10, + "warmup_epochs": 3, + "saved_weights_name": "experimento_ssd7_panel.h5", + "debug": true + }, + + +"test": { + "test_image_folder": "Train&Test_A/images", + "test_annot_folder": "Train&Test_A/anns", + "test_image_set_filename": "Train&Test_A/test.txt" + } +} diff --git a/config_7_panel_2.json b/config_7_panel_2.json new file mode 100644 index 0000000..e43c11e --- /dev/null +++ b/config_7_panel_2.json @@ -0,0 +1,28 @@ +{ + "model" : { + "backend": "ssd7", + "input": 400, + "labels": ["panel"] + }, + + "train": { + "train_image_folder": "Train&Test_A/images", + "train_annot_folder": "Train&Test_A/anns", + "train_image_set_filename": "Train&Test_A/train.txt", + + "train_times": 1, + "batch_size": 8, + "learning_rate": 1e-4, + "nb_epochs": 10, + "warmup_epochs": 3, + "saved_weights_name": "experimento_ssd7_panel_2.h5", + "debug": true + }, + + +"test": { + "test_image_folder": "Train&Test_A/images", + "test_annot_folder": "Train&Test_A/anns", + "test_image_set_filename": "Train&Test_A/test.txt" + } +} diff --git a/config_7_panel_cell.json b/config_7_panel_cell.json new file mode 100644 index 0000000..b533377 --- /dev/null +++ b/config_7_panel_cell.json @@ -0,0 +1,28 @@ +{ + "model" : { + "backend": "ssd7", + "input": 400, + "labels": ["panel", "cell"] + }, + + "train": { + "train_image_folder": "Train&Test_A/images", + "train_annot_folder": "Train&Test_A/anns_cell", + "train_image_set_filename": "Train&Test_A/train.txt", + + "train_times": 1, + "batch_size": 8, + "learning_rate": 1e-4, + "nb_epochs": 10, + "warmup_epochs": 3, + "saved_weights_name": "experimento_ssd7_panel_cell.h5", + "debug": true + }, + + +"test": { + "test_image_folder": "Train&Test_A/images", + "test_annot_folder": "Train&Test_A/anns_cell", + "test_image_set_filename": "Train&Test_A/test.txt" + } +} diff --git a/experimento_ssd7_panel.h5 b/experimento_ssd7_panel.h5 new file mode 100644 index 0000000..e05b49c Binary files /dev/null and b/experimento_ssd7_panel.h5 differ diff --git a/experimento_ssd7_panel_2.h5 b/experimento_ssd7_panel_2.h5 new file mode 100644 index 0000000..9f6446c Binary files /dev/null and b/experimento_ssd7_panel_2.h5 differ diff --git a/experimento_ssd7_panel_2_history.npy b/experimento_ssd7_panel_2_history.npy new file mode 100644 index 0000000..f157082 Binary files /dev/null and b/experimento_ssd7_panel_2_history.npy differ diff --git a/experimento_ssd7_panel_cell.h5 b/experimento_ssd7_panel_cell.h5 new file mode 100644 index 0000000..5a73873 Binary files /dev/null and b/experimento_ssd7_panel_cell.h5 differ diff --git a/experimento_ssd7_panel_cell_history.npy b/experimento_ssd7_panel_cell_history.npy new file mode 100644 index 0000000..f0dc138 Binary files /dev/null and b/experimento_ssd7_panel_cell_history.npy differ diff --git a/experimento_ssd7_panel_history.npy b/experimento_ssd7_panel_history.npy new file mode 100644 index 0000000..cf56166 Binary files /dev/null and b/experimento_ssd7_panel_history.npy differ diff --git 
a/log.csv b/log.csv new file mode 100644 index 0000000..c0554fc --- /dev/null +++ b/log.csv @@ -0,0 +1,421 @@ +epoch,loss,val_loss +0,5.510118139551041,6.669028917948405 +1,4.620036688890863,6.239988009134929 +2,4.226249254420999,6.841040261586508 +3,4.091210664586818,7.686105759938558 +4,3.622026850568487,6.607742118835449 +5,3.5433121865300645,8.279837512969971 +6,3.512152446711317,6.4912719090779625 +7,3.3289545355959143,7.4758354187011715 +8,3.482765424027839,7.232888158162435 +9,3.3542860036200666,6.111873690287272 +10,3.4707188764785197,6.265982087453207 +11,3.2180146499869013,5.951403490702311 +12,2.963566080052802,4.999087079366048 +13,3.117887996612711,5.038201649983724 +14,3.055175361300921,6.263498719533285 +15,2.7987984666164887,4.605638074874878 +16,2.9557558849770973,5.938434664408366 +17,2.8580517257506344,6.350362237294515 +18,2.9886046946048737,5.7530917485555015 +19,2.8870468539126377,5.349153010050456 +20,2.7189624239228687,5.969705327351888 +21,2.6918416213481984,3.5491503715515136 +22,2.861170437107695,5.07493953704834 +23,2.623182365466059,6.592086855570475 +24,2.770815874667878,5.301957639058431 +25,2.5428910356886845,5.037519264221191 +26,2.5331496147304056,4.154387172063192 +27,2.5904381918780346,5.361591466267904 +28,2.6647130751863437,4.339076677958171 +29,2.671634988554681,5.207049465179443 +30,2.536868430832599,5.206181303660075 +31,2.5269303959100804,4.604189443588257 +32,2.547324141292725,4.899435075124105 +33,2.5510170735577318,4.646349271138509 +34,2.537044236951686,4.370001490910848 +35,2.5675039841086873,4.40106611251831 +36,2.4225192929201937,5.272506109873453 +37,2.461898976818044,4.241414705912272 +38,2.450672066563256,4.3455554962158205 +39,2.536351051736385,3.543430789311727 +40,2.3972248679145856,4.24670573870341 +41,2.4373818038296124,4.144773292541504 +42,2.2674986729596522,4.5872970422108965 +43,2.5475314259529114,3.156279468536377 +44,2.3489086305168296,4.8856459299723305 +45,2.1580938357622066,6.214142862955729 +46,2.2294212690693267,4.281853596369426 +47,2.20053460674695,3.505253299077352 +48,2.46075110581327,4.718129396438599 +49,2.3233440803720598,4.173482211430867 +50,2.211711230929991,4.688928604125977 +51,2.3972729593515396,4.368849531809489 +52,2.173910693919405,4.594769461949666 +53,2.195787996133595,4.749962457021078 +54,2.261986894493407,3.4480560143788654 +55,2.236172522953216,4.448630460103353 +56,2.0949010884154577,3.0846354484558107 +57,2.1858557294657888,3.625709597269694 +58,2.1987350253348654,4.172247505187988 +59,2.1886363528049664,4.9714948813120525 +60,2.069128413466697,4.544129991531372 +61,2.19367531512646,4.284995698928833 +62,2.156055927915803,2.9886295636494955 +63,2.1244417771379998,3.436731783548991 +64,2.1488721364356103,5.231652641296387 +65,2.1220993420393794,4.221103874842326 +66,2.0596070790544467,3.37458758354187 +67,2.011380082115214,4.809928719202677 +68,2.0250811225287717,4.709945408503215 +69,2.1433624923229218,3.6902766863505048 +70,2.307024818785647,4.168658208847046 +71,2.2973661013646676,4.074279165267944 +72,2.285527411927568,3.939052104949951 +73,2.1036225490747613,3.133694537480672 +74,2.0302315761832066,3.193118604024251 +75,2.094067890593346,4.039541578292846 +76,2.020727533609309,4.289553165435791 +77,2.1503505093162882,4.287316306432088 +78,2.076847677218153,3.273712158203125 +79,2.083538164800786,3.439055665334066 +80,1.9649358757699142,3.39263014793396 +81,1.9769666695848425,3.4040281772613525 +82,1.9312713729574325,3.335324780146281 +83,1.8709337883916042,3.216568962732951 
+84,1.8324604976050398,3.3595632712046304 +85,1.876919298412952,3.115814781188965 +86,1.8610911781602506,3.2011043548583986 +87,1.8309650478210855,3.115077273050944 +88,1.96009569155409,3.0843749205271402 +89,1.8725523018645218,3.046114110946655 +90,1.936869632056419,3.1930816491444904 +91,1.799514335520724,3.2744102319081625 +92,1.8225869218401871,3.1622254053751626 +93,1.7391467281478517,3.34275107383728 +94,1.734298254898254,3.153507947921753 +95,1.8810681564878202,3.004853868484497 +96,2.000249590328399,3.0633230368296305 +97,1.7435078662126622,3.0148994445800783 +98,1.8868340608581462,3.0801260471343994 +99,1.8617350918815492,2.8029932022094726 +0,9.425875493884087,7.424945831298828 +1,5.931236448287964,5.740046977996826 +2,5.158609371185303,12.504448890686035 +3,5.445521764755249,6.512359619140625 +4,4.72419921875,9.225855827331543 +0,7.662772455215454,6.400893211364746 +1,5.811632056236267,7.0526018142700195 +2,4.759837512969971,4.526348114013672 +3,5.562472891807556,8.413644790649414 +4,4.6102127575874325,3.6775052547454834 +5,4.026118251085282,4.329113960266113 +6,3.9567205572128294,4.930643558502197 +0,4.291947777271271,5.641242027282715 +0,9.57666729927063,14.748775482177734 +1,6.334289512634277,13.631682395935059 +2,5.614185876846314,8.741722106933594 +3,5.0502364492416385,8.882115364074707 +4,4.925040850639343,10.044334411621094 +5,4.650737538337707,6.040557861328125 +6,3.997062907218933,5.663703441619873 +7,4.054195647239685,3.106687068939209 +8,3.7936658334732054,4.129853248596191 +9,3.575504515171051,3.2800419330596924 +10,4.157241787910461,3.997321605682373 +11,4.036867890357971,3.4753036499023438 +12,3.762564401626587,4.309584617614746 +13,3.465549476146698,4.396946907043457 +14,3.6985153460502627,8.012784957885742 +15,3.4447268199920655,3.4260730743408203 +16,3.3104467177391053,3.4645779132843018 +17,3.1926121377944945,5.626831531524658 +18,2.794120237827301,3.3249411582946777 +19,3.5080946278572083,9.333157539367676 +20,2.8098262190818786,2.224599599838257 +21,2.9234915828704833,2.5261638164520264 +22,2.9251465916633608,4.887867450714111 +23,2.710484461784363,1.8793960809707642 +24,2.8781397986412047,3.6699490547180176 +25,2.7713977098464966,3.505333185195923 +26,3.1688988184928895,2.180227756500244 +27,2.9556050205230715,6.320026874542236 +28,3.160018289089203,1.785821557044983 +29,2.686514904499054,3.313776731491089 +30,3.1641182923316955,3.073194742202759 +31,2.692085440158844,6.541172504425049 +32,2.316413254737854,2.803773880004883 +33,2.2647949409484864,2.268798589706421 +34,3.1685371685028074,4.1838202476501465 +35,2.800750479698181,3.2680280208587646 +36,2.555520939826965,2.4116203784942627 +37,2.1758973479270933,2.2294368743896484 +38,2.4307467484474183,2.0943284034729004 +39,2.1297872924804686,1.659180998802185 +40,2.6208885717391968,1.5405529737472534 +41,2.1338503682613372,1.5205631256103516 +42,2.658788948059082,4.433087348937988 +43,2.505739734172821,1.3420426845550537 +44,2.4486756539344787,1.7022470235824585 +45,3.22581095457077,2.586212158203125 +46,2.6756594610214233,2.9213602542877197 +47,2.521214907169342,2.639474868774414 +48,2.349372446537018,2.3526482582092285 +49,2.3571006381511688,3.9680111408233643 +50,2.628913962841034,1.7524428367614746 +51,2.1133799719810487,1.575929880142212 +52,2.2293547689914703,5.258865833282471 +53,2.3119247043132782,2.6193485260009766 +54,2.517700080871582,2.916719675064087 +55,2.216511501073837,2.2744908332824707 +56,1.968552553653717,1.4098974466323853 +57,2.4100059497356416,6.245255947113037 
+58,2.364307676553726,2.6863951683044434 +59,1.9538081681728363,2.4814934730529785 +60,2.263806539773941,1.3604397773742676 +61,1.9277082800865173,1.488633632659912 +62,2.2858369982242586,11.130377769470215 +63,2.8886038279533386,5.278674602508545 +64,2.477577738761902,3.475400686264038 +65,2.219870334863663,1.6822925806045532 +66,2.4032550179958343,1.728459358215332 +67,2.594639081954956,1.358062982559204 +68,2.2139855778217314,1.8900867700576782 +69,1.6617299699783326,1.5494791269302368 +70,2.0888832986354826,2.976024627685547 +71,2.1289234411716462,3.2035162448883057 +72,2.405316809415817,3.2074801921844482 +73,2.4194243466854095,4.110093116760254 +74,2.3310585188865662,4.0915937423706055 +75,2.1582571136951447,10.787525177001953 +76,2.341229839324951,3.5869884490966797 +77,2.133217837810516,1.334718942642212 +78,2.1182785034179688,1.6548967361450195 +79,2.025664346218109,2.5942630767822266 +80,2.1342422437667845,1.5209053754806519 +81,1.8549052095413208,1.5490634441375732 +82,1.6819430363178254,1.313148021697998 +83,1.694944336414337,1.391322135925293 +84,1.6461822605133056,1.1783668994903564 +85,2.0633348596096037,1.405306100845337 +86,2.2056054270267484,1.3352552652359009 +87,1.5451392793655396,1.3712495565414429 +88,1.8744987392425536,1.1776152849197388 +89,1.465874629020691,1.3135547637939453 +90,1.7581356024742127,1.467374324798584 +91,1.8051641368865967,1.372241497039795 +92,1.6639659988880158,1.4512510299682617 +93,1.9951361322402954,1.2248538732528687 +94,1.663730491399765,1.3945097923278809 +95,1.3763303506374358,1.4558112621307373 +96,2.1172481882572174,1.4779417514801025 +97,1.8102217328548431,1.5256366729736328 +98,1.7072846734523772,1.4526740312576294 +99,1.5981937515735627,1.581385612487793 +0,9.237408475875855,13.042949676513672 +1,5.6629617881774905,7.048020839691162 +0,23.77001953125,32.4190673828125 +1,20.413426399230957,23.459339141845703 +2,17.22719669342041,25.96030044555664 +3,14.69414472579956,26.508224487304688 +4,21.56411075592041,20.510215759277344 +0,8.43229079246521,11.622157096862793 +1,6.371858034133911,6.254767894744873 +2,5.518871655464173,7.682528972625732 +3,4.937838315963745,6.595365047454834 +4,4.6957590818405155,7.44390869140625 +5,4.49819194316864,5.136931896209717 +6,3.836543617248535,4.881785869598389 +7,3.783361053466797,6.287789344787598 +8,3.313195538520813,3.6483280658721924 +9,3.9447092390060425,7.186314105987549 +10,3.637809398174286,3.7272136211395264 +11,3.4883266425132753,9.002508163452148 +12,3.4079463982582094,3.9624993801116943 +13,3.332717876434326,4.13992977142334 +14,3.579555072784424,2.4605770111083984 +15,3.421144573688507,3.3089683055877686 +16,3.302432632446289,5.191346645355225 +17,3.23605500459671,2.3363630771636963 +18,2.9914412808418276,2.5331664085388184 +19,3.0008352732658388,7.282768249511719 +20,3.208008966445923,6.066461563110352 +21,2.9904596257209777,3.6934595108032227 +22,3.182333447933197,2.9090158939361572 +23,2.6183705949783325,2.2821738719940186 +24,2.5780122327804564,2.1042346954345703 +25,3.139271137714386,6.351105690002441 +26,3.2859340596199034,5.58103609085083 +27,2.21459983587265,1.8972580432891846 +28,2.5487705159187315,2.203923463821411 +29,2.542244575023651,2.201089382171631 +30,2.9696013140678406,2.7623047828674316 +31,2.967882194519043,2.801649570465088 +32,2.941064665317535,3.1724624633789062 +33,2.606772041320801,2.5099551677703857 +34,3.4118495774269104,2.794415235519409 +35,2.873078236579895,2.492004156112671 +36,2.2796417045593262,2.062519073486328 +37,2.5083087062835694,1.6880069971084595 
+38,2.643176691532135,2.2986221313476562 +39,2.490724952220917,1.819265604019165 +40,2.5202382016181946,2.391141414642334 +41,2.3219135713577272,1.8772228956222534 +42,3.0185955119132997,3.138643741607666 +43,2.3882989263534546,3.3792498111724854 +44,2.4145547580718993,1.723459243774414 +45,2.1839211487770083,1.508792519569397 +46,2.5080027890205385,2.001650094985962 +47,2.42613968372345,2.419433116912842 +48,2.9007816219329836,6.887621879577637 +49,2.5338677048683165,1.9110087156295776 +50,2.621233103275299,2.374685287475586 +51,2.3940469145774843,3.1991775035858154 +52,2.342935199737549,1.9444864988327026 +53,2.3797137784957885,2.6466474533081055 +54,2.446777708530426,2.7477736473083496 +55,2.4911719727516175,1.652651071548462 +56,2.267404410839081,1.7446259260177612 +57,2.659196581840515,1.7238589525222778 +58,2.689990527629852,1.5614198446273804 +59,2.574430241584778,3.448568344116211 +60,2.1016169714927675,1.6746673583984375 +61,2.145503945350647,1.5988365411758423 +62,2.5953175687789916,2.497501850128174 +63,2.2437212228775025,1.5059239864349365 +64,2.0190137434005737,1.5328168869018555 +65,2.6917251467704775,2.402193546295166 +66,2.442579984664917,8.209683418273926 +67,2.313585152626038,3.5338404178619385 +68,2.5556651830673216,2.4172115325927734 +69,2.3033766174316406,1.7683649063110352 +70,2.3846949481964113,1.6575692892074585 +71,2.357002168893814,2.053514003753662 +72,2.23988007068634,2.0087666511535645 +73,2.2341379857063295,1.9294345378875732 +74,2.4975681281089783,1.3832440376281738 +75,2.108108333349228,1.5080796480178833 +76,2.8153503131866455,1.8350507020950317 +77,2.058976174592972,1.527866005897522 +78,2.133276641368866,2.1388957500457764 +79,2.4273571288585662,2.012916088104248 +80,2.261022378206253,1.288000226020813 +81,1.97671404838562,1.2927175760269165 +82,1.8011221516132354,1.3257331848144531 +83,2.1121877253055574,1.327056646347046 +84,1.5005958354473115,1.3455957174301147 +85,2.023282461166382,1.3236554861068726 +86,1.666312210559845,1.31097412109375 +87,1.7993268489837646,1.2905677556991577 +88,1.9744508647918702,1.4180703163146973 +89,1.8957570743560792,1.3413660526275635 +90,1.7796542680263518,1.3274427652359009 +91,1.7336869478225707,1.4139866828918457 +92,1.8910833168029786,1.4077646732330322 +93,2.2750667834281924,1.452025055885315 +94,2.141851006746292,1.4157276153564453 +95,1.9786755788326262,1.5630687475204468 +96,2.0814486956596374,1.4718821048736572 +97,1.627004109621048,1.4020427465438843 +98,1.7745650565624238,1.4115678071975708 +99,1.6112149834632874,1.309640645980835 +0,13.240895519256592,9.980749130249023 +1,9.886444272994995,11.145248413085938 +2,8.806032466888428,8.300601959228516 +3,7.499856824874878,8.938425064086914 +4,7.472698106765747,7.951210021972656 +5,6.881255331039429,11.254376411437988 +6,6.477477426528931,6.909292697906494 +7,6.975805439949036,8.699749946594238 +8,6.153874192237854,10.958646774291992 +9,5.9306713962554936,8.4360990524292 +10,5.389483208656311,5.979596138000488 +11,5.088858094215393,5.9283246994018555 +12,5.791591567993164,6.770644187927246 +13,5.301032276153564,7.891006946563721 +14,4.9872572708129885,6.038945198059082 +15,5.466398530006408,6.412474155426025 +16,6.009402070045471,9.29183292388916 +17,5.173672623634339,7.6805925369262695 +18,5.270827403068543,7.109560489654541 +19,5.476474528312683,5.492109775543213 +20,4.651697566509247,6.603340148925781 +21,5.143176574707031,5.654886245727539 +22,5.482992310523986,5.875793933868408 +23,5.336576428413391,5.387059211730957 +24,5.718867363929749,8.075957298278809 
+25,6.0929483222961425,12.616324424743652 +26,5.223886513710022,9.853631973266602 +27,5.441392493247986,6.494979381561279 +28,5.443574357032776,9.000237464904785 +29,4.9779653024673465,4.9992523193359375 +30,4.964519805908203,5.661161422729492 +31,4.5981699657440185,5.208276748657227 +32,4.31012220621109,6.48082971572876 +33,4.425176796913147,10.94719409942627 +34,4.499751558303833,7.125434398651123 +35,4.895158166885376,7.044617652893066 +36,4.986830451488495,9.325119018554688 +37,4.89181571483612,5.168903350830078 +38,4.557168745994568,4.983942031860352 +39,4.472172594070434,5.713285446166992 +40,4.941359806060791,5.584297180175781 +41,4.585730526447296,5.188381671905518 +42,4.709441497325897,6.754541397094727 +43,5.042821569442749,5.269125938415527 +44,4.984173431396484,6.511160373687744 +45,4.910832777023315,6.0670084953308105 +46,4.683715043067932,5.8351054191589355 +47,5.104213237762451,5.177797794342041 +48,4.131224229335785,5.960608959197998 +49,4.537304904460907,5.435113906860352 +50,4.895492758750915,6.031461715698242 +51,4.944548580646515,5.719876766204834 +52,3.9747779250144957,5.597424030303955 +53,4.878332374095916,8.605620384216309 +54,4.1648838376998905,6.004228591918945 +55,4.899721660614014,9.129816055297852 +56,4.443343172073364,7.115135192871094 +57,4.582749195098877,5.435602188110352 +58,3.94369323015213,4.7926249504089355 +59,4.093871185779571,5.709817886352539 +60,5.1152237296104435,5.207928657531738 +61,4.295820953845978,4.9239301681518555 +62,3.824081053733826,4.54429817199707 +63,4.725213901996613,5.944464683532715 +64,4.445547094345093,4.8326334953308105 +65,4.405443170070648,5.64410400390625 +66,4.416481246948242,6.815909385681152 +67,5.041842558383942,4.850806713104248 +68,4.151236450672149,5.405308723449707 +69,4.619697303771972,5.282440185546875 +70,4.280664870738983,5.59921932220459 +71,4.536840713024139,6.520687580108643 +72,4.05976989030838,5.242108345031738 +73,4.486078248023987,5.418185234069824 +74,4.526286170482636,4.377415180206299 +75,3.8465312123298645,4.580915927886963 +76,4.049466667175293,4.974483966827393 +77,4.600880959033966,13.498869895935059 +78,4.668659925460815,6.448976039886475 +79,4.529668383598327,8.047800064086914 +80,4.2662005877494815,5.792871475219727 +81,4.104831688404083,4.611735820770264 +82,3.9904953455924987,4.554204940795898 +83,4.312863912582397,4.46755313873291 +84,3.7951492261886597,4.468851566314697 +85,4.3618169379234315,4.404786586761475 +86,4.353838121891021,4.683189868927002 +87,4.207552857398987,4.479642391204834 +88,4.132238986492157,4.546168327331543 +89,4.4995465350151065,4.566031455993652 +90,4.265286402702332,4.526498317718506 +91,4.370239367485047,4.527574062347412 +92,3.7339816904067993,4.543874740600586 +93,4.025301923751831,4.324979305267334 +94,4.025380687713623,4.627725124359131 +95,3.9856660056114195,4.295327663421631 +96,3.674951643943787,4.563712120056152 +97,3.7434568858146666,4.392282009124756 +98,4.092953279018402,4.400970935821533 +99,3.898336341381073,4.445141792297363
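For reference, a minimal Python sketch (not part of the commit) of how the training settings in config_7_panel.json and the loss history in log.csv above could be inspected. It only uses the keys and columns visible in the diffs; everything else is an assumption: it assumes both files sit in the repository root, that pandas and matplotlib are installed, and that log.csv concatenates several training sessions back to back, so a new run is inferred wherever the epoch counter drops back to 0. The output file name loss_curves.png is hypothetical.

import json

import pandas as pd
import matplotlib.pyplot as plt

# Read the SSD7 training configuration committed above (keys as in config_7_panel.json).
with open("config_7_panel.json") as f:
    config = json.load(f)

train_cfg = config["train"]
print("labels:        ", config["model"]["labels"])
print("input size:    ", config["model"]["input"])
print("batch size:    ", train_cfg["batch_size"])
print("learning rate: ", train_cfg["learning_rate"])
print("epochs:        ", train_cfg["nb_epochs"], "(+", train_cfg["warmup_epochs"], "warm-up)")
print("weights file:  ", train_cfg["saved_weights_name"])

# log.csv holds epoch,loss,val_loss rows from several training sessions appended
# one after another, so the epoch counter resets to 0 at the start of each run
# (assumption based on the repeated 0-rows in the data above).
log = pd.read_csv("log.csv")
run_id = (log["epoch"].diff() < 0).cumsum()  # increments each time the epoch counter resets

fig, ax = plt.subplots(figsize=(8, 5))
for i, run in log.groupby(run_id):
    ax.plot(run["epoch"], run["loss"], label=f"run {i} train")
    ax.plot(run["epoch"], run["val_loss"], linestyle="--", label=f"run {i} val")
ax.set_xlabel("epoch")
ax.set_ylabel("SSD loss")
ax.legend(fontsize="small")
fig.tight_layout()
fig.savefig("loss_curves.png")

Plotting the runs separately (rather than as one continuous curve) keeps the restarted sessions from appearing as sudden jumps in the loss, which is a reasonable way to read a log file that was appended to across multiple experiments.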