tensorflow2
BIN  keras-yolo3-master/utils/__pycache__/__init__.cpython-37.pyc   (normal file; binary file not shown)
BIN  keras-yolo3-master/utils/__pycache__/bbox.cpython-37.pyc       (normal file; binary file not shown)
BIN  keras-yolo3-master/utils/__pycache__/colors.cpython-37.pyc     (normal file; binary file not shown)
BIN  keras-yolo3-master/utils/__pycache__/image.cpython-37.pyc      (normal file; binary file not shown)
BIN  keras-yolo3-master/utils/__pycache__/utils.cpython-37.pyc      (normal file; binary file not shown)

keras-yolo3-master/utils/bbox.py
@@ -9,7 +9,7 @@ class BoundBox:
         self.ymin = ymin
         self.xmax = xmax
         self.ymax = ymax

         self.c = c
         self.classes = classes
@@ -19,14 +19,14 @@ class BoundBox:
     def get_label(self):
         if self.label == -1:
             self.label = np.argmax(self.classes)

         return self.label

     def get_score(self):
         if self.score == -1:
             self.score = self.classes[self.get_label()]

         return self.score

 def _interval_overlap(interval_a, interval_b):
     x1, x2 = interval_a
@@ -41,49 +41,51 @@ def _interval_overlap(interval_a, interval_b):
         if x2 < x3:
             return 0
         else:
             return min(x2,x4) - x3

 def bbox_iou(box1, box2):
     intersect_w = _interval_overlap([box1.xmin, box1.xmax], [box2.xmin, box2.xmax])
     intersect_h = _interval_overlap([box1.ymin, box1.ymax], [box2.ymin, box2.ymax])

     intersect = intersect_w * intersect_h

     w1, h1 = box1.xmax-box1.xmin, box1.ymax-box1.ymin
     w2, h2 = box2.xmax-box2.xmin, box2.ymax-box2.ymin

     union = w1*h1 + w2*h2 - intersect

     if union == 0: return 0

     return float(intersect) / union

 def draw_boxes(image, boxes, labels, obj_thresh, quiet=True):
     for box in boxes:
         label_str = ''
         label = -1

         for i in range(len(labels)):
             if box.classes[i] > obj_thresh:
                 if label_str != '': label_str += ', '
                 label_str += (labels[i] + ' ' + str(round(box.get_score()*100,0)) + '%')
                 label = i
             if not quiet: print(label_str)

         if label >= 0:
             text_size = cv2.getTextSize(label_str, cv2.FONT_HERSHEY_SIMPLEX, 1.1e-4 * image.shape[0], 2)
             width, height = text_size[0][0], text_size[0][1]
             region = np.array([[box.xmin-3,       box.ymin],
                                [box.xmin-3,       box.ymin-height-16],
                                [box.xmin+width+6, box.ymin-height-16],
                                [box.xmin+width+6, box.ymin]], dtype='int32')

             cv2.rectangle(img=image, pt1=(box.xmin,box.ymin), pt2=(box.xmax,box.ymax), color=get_color(label), thickness=1)
             cv2.fillPoly(img=image, pts=[region], color=get_color(label))
             cv2.putText(img=image,
                         text=label_str,
                         org=(box.xmin+6, box.ymin - 6),
                         fontFace=cv2.FONT_HERSHEY_SIMPLEX,
                         fontScale=0.7e-3 * image.shape[0],
                         color=(0,0,0),
                         thickness=2)

     return image
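Note (not part of the commit): the IoU helpers above can be exercised on their own. A minimal sketch, assuming the BoundBox constructor keeps c and classes optional as in upstream keras-yolo3, and that the repo root is on sys.path:

    from utils.bbox import BoundBox, bbox_iou

    box_a = BoundBox(10, 10, 50, 50)   # xmin, ymin, xmax, ymax; c/classes left at their defaults
    box_b = BoundBox(30, 30, 70, 70)
    print(bbox_iou(box_a, box_b))      # intersection area / union area; 0 when the union is empty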

keras-yolo3-master/utils/multi_gpu.py
@@ -1,5 +1,5 @@
-from keras.layers import Lambda, concatenate
-from keras.models import Model
+from tensorflow.keras.layers import Lambda, concatenate
+from tensorflow.keras.models import Model
 import tensorflow as tf

 def multi_gpu_model(model, gpus):
@@ -59,4 +59,4 @@ def multi_gpu_model(model, gpus):
         for name, outputs in zip(model.output_names, all_outputs):
             merged.append(concatenate(outputs,
                                       axis=0, name=name))
         return Model(model.inputs, merged)
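Note (not part of the commit): in TensorFlow 2 the Keras API ships inside TensorFlow as tensorflow.keras, which is why the standalone keras imports above are swapped for tensorflow.keras ones. A quick sanity check that the new imports wire up a model, assuming a TF 2.x install:

    import tensorflow as tf
    from tensorflow.keras.layers import Input, Lambda, concatenate
    from tensorflow.keras.models import Model

    inp = Input(shape=(4,))
    out = concatenate([Lambda(lambda t: t * 2.0)(inp), inp], axis=-1)
    model = Model(inp, out)
    model.summary()   # a (None, 8) output confirms Lambda/concatenate/Model resolve under tf.keras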

keras-yolo3-master/utils/utils.py
@@ -3,6 +3,7 @@ import numpy as np
 import os
 from .bbox import BoundBox, bbox_iou
 from scipy.special import expit
+import tensorflow as tf

 def _sigmoid(x):
     return expit(x)
@@ -166,18 +167,30 @@ def do_nms(boxes, nms_thresh):
                 if bbox_iou(boxes[index_i], boxes[index_j]) >= nms_thresh:
                     boxes[index_j].classes[c] = 0

-def decode_netout(netout, anchors, obj_thresh, net_h, net_w):
-    grid_h, grid_w = netout.shape[:2]
+def decode_netout(netout_old, anchors, obj_thresh, net_h, net_w):
+    grid_h, grid_w = netout_old.shape[:2]
     nb_box = 3
-    netout = netout.reshape((grid_h, grid_w, nb_box, -1))
-    nb_class = netout.shape[-1] - 5
+    #netout = netout.reshape((grid_h, grid_w, nb_box, -1))
+    netout_old = tf.reshape(netout_old, (grid_h, grid_w, nb_box, -1))
+    nb_class = netout_old.shape[-1] - 5

     boxes = []
+    ## Tensorflow v.2
+    #print(tf.shape(netout))
+    aux_1 = _sigmoid(netout_old[..., :2])
+    #print(tf.shape(aux_1))
+    aux_2 = _sigmoid(netout_old[..., 4])
+    #print(tf.shape(aux_2[..., np.newaxis]))
+    aux_3 = aux_2[..., np.newaxis] * _softmax(netout_old[..., 5:])
+    aux_4 = aux_3 * (aux_3 > obj_thresh)
+    #print(tf.shape(aux_4))
+    netout = tf.concat([aux_1, netout_old[..., 2:4], aux_2[..., np.newaxis], aux_4], 3)
+    #print(tf.shape(new_netout))

-    netout[..., :2] = _sigmoid(netout[..., :2])
-    netout[..., 4] = _sigmoid(netout[..., 4])
-    netout[..., 5:] = netout[..., 4][..., np.newaxis] * _softmax(netout[..., 5:])
-    netout[..., 5:] *= netout[..., 5:] > obj_thresh
+    #netout[..., :2] = _sigmoid(netout[..., :2])
+    #netout[..., 4] = _sigmoid(netout[..., 4])
+    #netout[..., 5:] = netout[..., 4][..., np.newaxis] * _softmax(netout[..., 5:])
+    #netout[..., 5:] *= netout[..., 5:] > obj_thresh

     for i in range(grid_h*grid_w):
         row = i // grid_w
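Note (my reading, not stated in the commit): under TF2 the value reaching decode_netout is a tf.Tensor, which does not support in-place slice assignment (netout[..., :2] = ...), so the same transformation is rebuilt from the aux_* pieces and stitched together with tf.concat. A NumPy sketch of the intended equivalence, with made-up shapes:

    import numpy as np

    def _sigmoid(x):
        return 1.0 / (1.0 + np.exp(-x))

    def _softmax(x, axis=-1):
        e = np.exp(x - np.max(x, axis=axis, keepdims=True))
        return e / np.sum(e, axis=axis, keepdims=True)

    netout_old = np.random.randn(13, 13, 3, 85).astype(np.float32)   # hypothetical 13x13 grid, 80 classes
    obj_thresh = 0.5

    xy   = _sigmoid(netout_old[..., :2])                              # aux_1: box center offsets
    conf = _sigmoid(netout_old[..., 4])                               # aux_2: objectness
    cls  = conf[..., np.newaxis] * _softmax(netout_old[..., 5:])      # aux_3: class scores
    cls  = cls * (cls > obj_thresh)                                   # aux_4: thresholded scores
    netout = np.concatenate([xy, netout_old[..., 2:4], conf[..., np.newaxis], cls], axis=3)
    assert netout.shape == netout_old.shape                           # same layout as the old in-place version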
@@ -198,7 +211,7 @@ def decode_netout(netout, anchors, obj_thresh, net_h, net_w):
             h = anchors[2 * b + 1] * np.exp(h) / net_h # unit: image height

             # last elements are class probabilities
-            classes = netout[row,col,b,5:]
+            classes = np.array(netout[row,col,b,5:])

             box = BoundBox(x-w/2, y-h/2, x+w/2, y+h/2, objectness, classes)
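Note (not part of the commit): wrapping the class slice in np.array() materializes the TF2 EagerTensor as a plain NumPy array, so downstream NumPy code such as np.argmax in BoundBox.get_label keeps working unchanged. A small sketch, assuming a TF 2.x install:

    import numpy as np
    import tensorflow as tf

    netout = tf.random.uniform((13, 13, 3, 85))    # stand-in for one decoded YOLO output head
    classes = np.array(netout[0, 0, 0, 5:])        # EagerTensor -> np.ndarray
    print(type(classes), int(np.argmax(classes)))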