# Post-processing utilities: person/head bbox matching, fullbody keypoint
# left/right reversal, and a small runner wrapper around postprocess_hw.
from base.postprocess import postprocess_hw
|
|
import numpy as np
|
|
import math
|
|
import kneron_preprocessing
|
|
|
|
def person_head_bbox_matching_fun1(bboxes):
    """Decompose [x, y, w, h, ...] boxes into corner coordinates and areas.

    :param bboxes: np.ndarray of shape (N, >=4), rows [x1, y1, w, h, ...]
                   with xy1 = top-left
    :return: tuple (x1, y1, x2, y2, xc, areas), each an array of length N
    """
    left = bboxes[:, 0]
    top = bboxes[:, 1]
    width = bboxes[:, 2]
    height = bboxes[:, 3]
    right = left + width
    bottom = top + height
    center_x = left + width * 0.5
    # The +1 follows the pixel-inclusive area convention, so even a
    # zero-extent box gets a non-zero area.
    box_areas = (right - left + 1) * (bottom - top + 1)
    return left, top, right, bottom, center_x, box_areas
|
|
|
|
def person_head_bbox_matching_fun2(box_i, bboxes):
    """Intersection area between one reference box and a batch of boxes.

    :param box_i: sequence (x1, y1, x2, y2) of a single reference box
    :param bboxes: sequence (x1, y1, x2, y2) where each element is an array
                   over the N candidate boxes
    :return: array of N intersection areas (0.0 where boxes do not overlap)
    """
    ref_x1, ref_y1, ref_x2, ref_y2 = box_i
    cand_x1, cand_y1, cand_x2, cand_y2 = bboxes
    # Overlap rectangle: inner-most corners of the reference and each candidate.
    overlap_left = np.maximum(ref_x1, cand_x1)
    overlap_top = np.maximum(ref_y1, cand_y1)
    overlap_right = np.minimum(ref_x2, cand_x2)
    overlap_bottom = np.minimum(ref_y2, cand_y2)
    # Clamp at zero so disjoint boxes contribute no area; the +1 matches the
    # pixel-inclusive convention used by person_head_bbox_matching_fun1.
    overlap_w = np.maximum(0.0, overlap_right - overlap_left + 1)
    overlap_h = np.maximum(0.0, overlap_bottom - overlap_top + 1)
    return overlap_w * overlap_h
|
|
|
|
def person_head_bbox_matching(pre_output, **kwargs):
    '''
    maintainer : doris

    Pair every detected person bbox with its most suitable head bbox:
      (1) split detections into person (class_id == 15.0, score filtered)
          and head (any other class_id) bboxes
      (2) skip person bboxes that overlap (> thresh_fbox_iou of their own
          area) with more than one person bbox (counting themselves), i.e.
          heavily covered / ambiguous ones
      (3) keep head bboxes whose overlap-over-head-area ratio with the
          person bbox exceeds thresh_head_iou
      (4) if more than one head matches, pick the head minimising
          |person_xc - head_xc| + |person_y1 - head_y1|
      (5) drop pairs flagged as duplicates ("vbox") by find_vbox when two
          person bboxes claim the same head

    input :
        pre_output : List of bboxes [x1, y1, w, h, score, class_id] with
                     xy1 = top-left
    kwargs :
        thresh_head_iou     : float, default 0.8
        thresh_fbox_iou     : float, default 0.9
        thresh_person_score : float, default 0.6
    output :
        pair_p : List of person bboxes [x1, y1, w, h, score, class_id]
        pair_h : List of head bboxes, aligned index-wise with pair_p
    '''
    thresh_head_iou = kwargs.get('thresh_head_iou', 0.8)
    thresh_fbox_iou = kwargs.get('thresh_fbox_iou', 0.9)
    thresh_person_score = kwargs.get('thresh_person_score', 0.6)
    person_bboxes, head_bboxes = [], []
    # class_id 15.0 marks a person; every other class is treated as a head.
    # Low-score persons are dropped; heads are kept regardless of score.
    for bbox in pre_output:
        if bbox[5]==15.0:
            if bbox[4] >= thresh_person_score:
                person_bboxes.append(bbox)
        else:
            head_bboxes.append(bbox)
    # Nothing to match if either category is empty.
    if len(person_bboxes)==0 or len(head_bboxes)==0:
        return [], []
    person_bboxes, head_bboxes = np.asarray(person_bboxes), np.asarray(head_bboxes)
    # Corner coordinates, x-centres and (pixel-inclusive) areas for both sets.
    person_x1, person_y1, person_x2, person_y2, person_xc, person_areas = person_head_bbox_matching_fun1(person_bboxes)
    head_x1, head_y1, head_x2, head_y2, head_xc, head_areas = person_head_bbox_matching_fun1(head_bboxes)

    # heads_candidates[p] : head indices matched by person p ([] if none)
    # person_candidate[h] : person index that claimed head h, -1 if unclaimed
    # vbox                : person indices flagged as duplicates to drop
    pair_p, pair_h, heads_candidates, person_candidate, vbox = [], [], [], np.asarray([-1 for ii in range(head_bboxes.shape[0])]), []

    # each person bbox 'ii' finds the most suitable candidate head among all the head bboxes
    for ii in range(person_bboxes.shape[0]):
        # Overlap of person 'ii' against every person bbox (including itself).
        inter_person = person_head_bbox_matching_fun2([person_x1[ii], person_y1[ii], person_x2[ii], person_y2[ii]], [person_x1, person_y1, person_x2, person_y2])
        # Ratio of overlap to this person's own area (not a symmetric IoU).
        ovr_person = inter_person / person_areas[ii] #(person_areas[ii] + head_areas - inter)
        inds_fbox = np.where(ovr_person > thresh_fbox_iou)[0]
        # 'ii' always fully overlaps itself, so a count > 1 means another
        # person bbox nearly covers this one: treat as ambiguous and skip.
        if inds_fbox.shape[0]>1:
            heads_candidates.append([])
            continue

        # Overlap of person 'ii' against every head bbox.
        inter = person_head_bbox_matching_fun2([person_x1[ii], person_y1[ii], person_x2[ii], person_y2[ii]], [head_x1, head_y1, head_x2, head_y2])
        # Overlap normalised by each head's own area (how much of the head
        # lies inside the person bbox), not the union-based IoU.
        ovr = inter / head_areas#(person_areas[ii] + head_areas - inter)

        # Indices of heads sufficiently contained in person bbox 'ii'.
        inds = np.where(ovr > thresh_head_iou)[0]

        # There is no matching head bbox for the current person bbox 'ii'.
        if inds.shape[0]==0:
            heads_candidates.append([])
            continue

        else:
            heads_candidates.append(inds.tolist())
            # Record the claim on the first matched head; find_vbox reports a
            # duplicate person index when two persons claim the same head.
            person_candidate, vbox_tmp = find_vbox(heads_candidates,person_candidate,inds,ii,ovr_person,person_areas)
            if len(vbox_tmp)>0 :
                vbox.append(vbox_tmp[0])

        # There is exactly one matching head bbox for person bbox 'ii'.
        if inds.shape[0]==1:
            pair_p.append(person_bboxes[ii].tolist())
            pair_h.append(head_bboxes[inds][0].tolist())

        # There is more than one matching head bbox for person bbox 'ii'.
        else:
            # 'head_bboxes_' : all matching head bboxes for person bbox 'ii'
            head_bboxes_ = head_bboxes[inds]
            head_xc_ = head_xc[inds]
            head_y1_ = head_y1[inds]

            # Pick the head with the smallest combined horizontal-centre and
            # top-edge distance to the person bbox.
            inds_ = np.argsort( abs(person_xc[ii]-head_xc_) + abs(person_y1[ii]-head_y1_))[0]
            pair_p.append(person_bboxes[ii].tolist())
            pair_h.append(head_bboxes_[inds_].tolist())

    # Drop pairs flagged as duplicates.
    # NOTE(review): 'vbox' holds PERSON indices while 'b_i' indexes the pair
    # list; these only line up when no earlier person was skipped — confirm
    # whether that potential misalignment is intended.
    pair_p_remove_vbox, pair_h_remove_vbox = [], []
    for b_i, (bbox_p, bbox_h) in enumerate(zip(pair_p,pair_h)):
        if b_i in vbox: continue
        # Defensive: pair entries are always non-empty lists as built above.
        if len(bbox_p)==0: continue
        pair_p_remove_vbox.append(bbox_p)
        pair_h_remove_vbox.append(bbox_h)

    return pair_p_remove_vbox, pair_h_remove_vbox
|
|
|
|
|
|
def reverse_left_right_kpts_fullbody(pre_output, **kwargs):
    '''
    author: stevenho

    Reverse kpts from coco to public fields format for fullbody (17) kpts.

    :param pre_output: iterable of flat keypoint lists [x0, y0, x1, y1, ...]
                       covering 17 joints (34 values)
    :return: list of flattened keypoint lists with each left/right joint
             pair swapped (joint 0, the nose, stays in place)
    '''
    # Permutation over the 17 joints: index 0 is fixed, every subsequent
    # lateral pair (1,2), (3,4), ... (15,16) is swapped.
    swap_order = [0, 2, 1, 4, 3, 6, 5, 8, 7, 10, 9, 12, 11, 14, 13, 16, 15]
    swapped = []
    for flat_joints in pre_output:
        joint_pairs = np.asarray(flat_joints).reshape(-1, 2)
        swapped.append(joint_pairs[swap_order].flatten().tolist())
    return swapped
|
|
|
|
def find_vbox(heads_candidates, person_candidate, heads_inds, person_ii, ovr_person,person_areas):
    """Resolve two person bboxes competing for the same head bbox.

    The first head index in heads_inds is the claim token: the first person
    to reach it is recorded in person_candidate. When a second person claims
    the same head and both persons match exactly one head each, the person
    with the smaller (or equal) area is flagged as a duplicate ("vbox").

    :param heads_candidates: list where heads_candidates[p] is the list of
        head indices matched by person p
    :param person_candidate: np.ndarray mapping head index -> owning person
        index, -1 while unclaimed; mutated in place and returned
    :param heads_inds: np.ndarray of head indices matched by the current person
    :param person_ii: index of the current person bbox
    :param ovr_person: unused; kept for interface compatibility
    :param person_areas: np.ndarray of person bbox areas
    :return: (person_candidate, vbox) where vbox is [] or a single-element
        list holding the person index to drop
    """
    duplicates = []
    claimed_head = heads_inds[0]
    owner = person_candidate[claimed_head]
    if owner < 0:
        # Head is still unclaimed: the current person takes it.
        person_candidate[claimed_head] = person_ii
    elif len(heads_candidates[owner]) == 1 and len(heads_candidates[person_ii]) == 1:
        # Both persons match exactly this one head: the smaller bbox is
        # considered the duplicate and loses.
        if person_areas[person_ii] <= person_areas[owner]:
            duplicates.append(person_ii)
        else:
            # The competing (earlier) person bbox is the duplicate.
            duplicates.append(owner)
    return person_candidate, duplicates
|
|
def chunks(lst, n):
    '''
    author: stevenho

    Yield successive n-sized chunks from lst; the final chunk may be
    shorter when len(lst) is not a multiple of n.

    :param lst: sliceable sequence (list, string, ...)
    :param n: chunk size
    :return: generator of slices of lst, each of length at most n
    '''
    for offset in range(0, len(lst), n):
        yield lst[offset:offset + n]
|
|
|
|
|
|
def postprocess_(pre_output, type, **kwargs):
    """Dispatch raw model output to the post-processing routine for `type`.

    :param pre_output: raw detections / keypoints from the upstream model
    :param type: integer selector — 48: fullbody keypoint left/right
        reversal, 60: person/head bbox matching. (The name shadows the
        builtin `type` but is kept unchanged for caller compatibility.)
    :param kwargs: forwarded untouched to the selected routine
    :return: the selected routine's result
    :raises ValueError: if `type` is not a supported selector
    """
    if type == 48:
        return reverse_left_right_kpts_fullbody(pre_output, **kwargs)
    elif type == 60:
        return person_head_bbox_matching(pre_output, **kwargs)
    # Fail loudly on unknown selectors. The previous `assert 0` would be
    # stripped under `python -O`, silently returning None instead.
    raise ValueError('unsupported postprocess type: {}'.format(type))
|
|
|
|
|
|
class FunctionRunner:
    # Thin wrapper that stores post-processing configuration at construction
    # time and forwards it, merged with per-call kwargs, to postprocess_hw.
    # (original note: only used for 10 pts landmark to bbox)
    def __init__(self, type, multiple_input=False, **kwargs):
        '''
        :param type: integer selector forwarded to postprocess_ (e.g. 48, 60)
        :param multiple_input: stored in the config and forwarded as-is
        :param kwargs: extra post-processing options (e.g. thresholds)
        '''
        # NOTE(review): locals() captures 'self', 'type', 'multiple_input'
        # AND the 'kwargs' dict itself, so all of those keys land in
        # init_config and are later splatted into postprocess_hw — confirm
        # that postprocess_hw tolerates the extra 'self'/'kwargs' entries.
        self.init_config = locals()
        # Flatten the caller's kwargs into the config as top-level keys too.
        self.init_config.update(kwargs)

    def run(self, img_path, pre_output=None, **kwargs):
        """
        Run the configured post-processing on one model output.

        :param img_path: image path, passed through to postprocess_hw
        :param pre_output: raw model output; when None, post-processing is
            skipped and an empty list is returned
        :param kwargs: per-call overrides merged over the stored config
        :return: result of postprocess_hw (e.g. bbox pairs or keypoint lists)
        """
        post_config = {
            'img_path': img_path
        }
        # No model output means there is nothing to post-process.
        if pre_output is None:
            return []
        # Stored config first, then per-call kwargs win on key clashes.
        post_config.update(self.init_config)
        post_config.update(kwargs)
        result = postprocess_hw(pre_output, postprocess_, **post_config)

        return result
|