##############################################################
### Copyright (c) 2018-present, Xuanyi Dong ###
### Style Aggregated Network for Facial Landmark Detection ###
### Computer Vision and Pattern Recognition, 2018 ###
##############################################################
import numpy as np
import math, pdb
import os, sys
import os.path as osp
from pathlib import Path
import init_path
import datasets
from scipy.io import loadmat
from utils.file_utils import load_list_from_folders, load_txt_file
def load_box(mat_path, cdir):
  """Parse one 300-W bounding-box .mat file into per-image records.

  Arguments:
    mat_path : path to a bounding_boxes_*.mat file.
    cdir     : directory that contains the corresponding images.
  Returns a list of (image_path_without_extension, detector_box, ground_truth_box).
  """
  entries = loadmat(mat_path)['bounding_boxes'][0]
  assert len(entries) > 0, 'The length of this mat file should be greater than 0 vs {}'.format(len(entries))
  records = []
  for entry in entries:
    # each cell element is a (1,1) struct array; unwrap to the underlying fields
    fields = entry[0][0]
    name, detector_box, gt_box = fields[0][0], fields[1][0], fields[2][0]
    # drop the 4-character extension (e.g. '.jpg') so the path matches .pts lookups
    records.append((osp.join(cdir, name)[:-4], detector_box, gt_box))
  return records
def load_mats(lists):
  """Concatenate the per-dataset records produced by load_box.

  Arguments:
    lists : iterable of (mat_path, image_dir) pairs.
  Returns one flat list of (image_path, detector_box, ground_truth_box) tuples,
  in the same order the pairs were given.
  """
  merged = []
  for item in lists:
    merged.extend(load_box(item[0], item[1]))
  return merged
def load_all_300w(root_dir, style):
  """Collect detector / ground-truth boxes for every 300-W image of one style.

  Arguments:
    root_dir : root of the 300W-Style dataset (contains Bounding_Boxes/).
    style    : style name, e.g. 'Original', 'Gray', 'Light' or 'Sketch'.
  Returns a dict mapping extension-less image path -> (detector_box, ground_truth_box).
  """
  mat_dir = osp.join(root_dir, 'Bounding_Boxes')
  style_dir = osp.join(root_dir, '300W-' + style)
  specs = [('bounding_boxes_lfpw_testset.mat', ('lfpw', 'testset')),
           ('bounding_boxes_lfpw_trainset.mat', ('lfpw', 'trainset')),
           ('bounding_boxes_ibug.mat', ('ibug',)),
           ('bounding_boxes_afw.mat', ('afw',)),
           ('bounding_boxes_helen_testset.mat', ('helen', 'testset')),
           ('bounding_boxes_helen_trainset.mat', ('helen', 'trainset'))]
  pairs = [(osp.join(mat_dir, mat_name), osp.join(style_dir, *subdirs)) for mat_name, subdirs in specs]
  data_dict = {}
  for image_path, det_box, gt_box in load_mats(pairs):
    # spaces are stripped so keys match the paths produced by the folder scan
    data_dict[image_path.replace(' ', '')] = (det_box, gt_box)
  return data_dict
def return_box(image_path, pts_path, all_dict, USE_BOX):
  """Return the face bounding box for one image as an 'x1 y1 x2 y2' string.

  Arguments:
    image_path : image file path with a 4-character extension (e.g. '.jpg').
    pts_path   : landmark .pts file; only read when USE_BOX == 'GTL'.
    all_dict   : mapping from extension-less image path to (detector_box, ground_truth_box).
    USE_BOX    : 'GTL' (box derived from landmarks), 'GTB' (ground-truth box) or 'DET' (detector box).
  Raises:
    ValueError if USE_BOX is not one of the recognized indicators.
  """
  image_path = image_path[:-4]  # drop the extension to match all_dict keys
  assert image_path in all_dict, '{} not found'.format(image_path)
  np_boxes = all_dict[ image_path ]
  if USE_BOX == 'GTL':
    # derive the box from the 68 ground-truth landmarks
    box_str = datasets.dataset_utils.for_generate_box_str(pts_path, 68, 0)
  elif USE_BOX == 'GTB':
    box_str = '{:.3f} {:.3f} {:.3f} {:.3f}'.format(np_boxes[1][0], np_boxes[1][1], np_boxes[1][2], np_boxes[1][3])
  elif USE_BOX == 'DET':
    box_str = '{:.3f} {:.3f} {:.3f} {:.3f}'.format(np_boxes[0][0], np_boxes[0][1], np_boxes[0][2], np_boxes[0][3])
  else:
    # raise instead of `assert False`: asserts are stripped under `python -O`
    raise ValueError('The box indicator not found : {}'.format(USE_BOX))
  return box_str
def _write_pairs(txt_path, pairs, box_data, use_box, all_lines):
  """Write one '<image> <pts> <box>' line per (image, annotation) pair to txt_path.

  Every written line is also appended to all_lines so the caller can emit the
  combined '300w.all.*' file without recomputing boxes.
  """
  with open(txt_path, 'w') as txtfile:
    for image_path, anno_path in pairs:
      box_str = return_box(image_path, anno_path, box_data, use_box)
      line = '{} {} {}\n'.format(image_path, anno_path, box_str)
      txtfile.write(line)
      all_lines.append(line)


def generage_300w_list(root, save_dir, box_data, SUFFIX):
  """Scan the 300-W image folders and emit train/common/challenge/full/all list files.

  Arguments:
    root     : directory holding the afw/helen/ibug/lfpw (and 300W indoor/outdoor) folders.
    save_dir : output directory for the generated list files (created if absent).
    box_data : dict from extension-less image path to (detector_box, ground_truth_box).
    SUFFIX   : box indicator ('GTL', 'GTB' or 'DET'); also used as the output-file suffix.
  """
  assert osp.isdir(root), '{} is not dir'.format(root)
  if not osp.isdir(save_dir): os.makedirs(save_dir)
  # canonical 300-W split sizes, used as sanity checks after the scan
  train_length, common_length, challeng_length = 3148, 554, 135
  subsets = ['afw', 'helen', 'ibug', 'lfpw']
  dir_lists = [osp.join(root, subset) for subset in subsets]
  imagelist, num_image = load_list_from_folders(dir_lists, ext_filter=['png', 'jpg', 'jpeg'], depth=3)
  indoor, indoor_num = load_list_from_folders([osp.join(root, '300W', '01_Indoor')], ext_filter=['png', 'jpg', 'jpeg'], depth=3)
  otdoor, otdoor_num = load_list_from_folders([osp.join(root, '300W', '02_Outdoor')], ext_filter=['png', 'jpg', 'jpeg'], depth=3)
  assert indoor_num == 300 and otdoor_num == 300, 'The number of images are not right for 300-W-IO: {} & {}'.format(indoor_num, otdoor_num)
  train_set, common_set, challeng_set = [], [], []
  for image_path in imagelist:
    name, ext = osp.splitext(image_path)
    anno_path = name + '.pts'
    assert osp.isfile(anno_path), 'annotation {} for : {} does not exist'.format(image_path, anno_path)
    # route each image into its split based on the folder names in its path
    if name.find('ibug') > 0:
      challeng_set.append( (image_path, anno_path) )
    elif name.find('afw') > 0:
      train_set.append( (image_path, anno_path) )
    elif name.find('helen') > 0 or name.find('lfpw') > 0:
      if name.find('trainset') > 0:
        train_set.append( (image_path, anno_path) )
      elif name.find('testset') > 0:
        common_set.append( (image_path, anno_path) )
      else:
        raise Exception('Unknown name : {}'.format(name))
    else:
      raise Exception('Unknown name : {}'.format(name))
  assert len(train_set) == train_length, 'The length is not right for train : {} vs {}'.format(len(train_set), train_length)
  assert len(common_set) == common_length, 'The length is not right for common : {} vs {}'.format(len(common_set), common_length)
  # BUGFIX: original message reported the *common* set counts for the challenge check
  assert len(challeng_set) == challeng_length, 'The length is not right for challeng : {} vs {}'.format(len(challeng_set), challeng_length)
  all_lines = []
  _write_pairs(osp.join(save_dir, '300w.train.' + SUFFIX), train_set, box_data, SUFFIX, all_lines)
  _write_pairs(osp.join(save_dir, '300w.test.common.' + SUFFIX), common_set, box_data, SUFFIX, all_lines)
  _write_pairs(osp.join(save_dir, '300w.test.challenge.' + SUFFIX), challeng_set, box_data, SUFFIX, all_lines)
  # the 'full' test set is simply the common split followed by the challenge split
  _write_pairs(osp.join(save_dir, '300w.test.full.' + SUFFIX), common_set + challeng_set, box_data, SUFFIX, all_lines)
  with open(osp.join(save_dir, '300w.all.' + SUFFIX), 'w') as txtfile:
    txtfile.writelines(all_lines)
if __name__ == '__main__':
  this_dir = osp.dirname(os.path.abspath(__file__))
  print ('This dir : {:}, {:}'.format(this_dir, os.environ['HOME']))
  path_300w = Path.home() / 'Datasets' / '300W-Style'
  print ('300W Dir : {:}'.format(path_300w))
  assert path_300w.exists(), '{:} does not exists'.format(path_300w)
  root = str(path_300w)
  # generate list files for every (box indicator, rendering style) combination
  for use_box in ('GTB', 'DET'):
    for style in ('Original', 'Gray', 'Light', 'Sketch'):
      box_datas = load_all_300w(root, style)
      style_name = '300W-' + style
      generage_300w_list(osp.join(root, style_name), osp.join(root, 'box-coords', style_name), box_datas, use_box)