马上注册,享用更多功能,让你轻松玩转AIHIA梦工厂!
您需要 登录 才可以下载或查看,没有账号?立即注册
x
**一、pairs 文件制作**
名单包含两种内容:
标签为1,代表是同一个人的两两组合图像对;
不同人的两两组合图像对,标签是0,代表是不同人

看网上很多人写的在生成不同人的图像对时忽略了,同一个人员ID目录下包含多张图的情况,我这里进行了修改,代码如下:
```python
import argparse
import itertools
import os
import random
import time
def _collect_images(dataset):
    """Walk *dataset* (one sub-directory per person ID) and collect images.

    Returns a tuple ``(all_images, same_pairs)`` where ``all_images`` is the
    flat list of every image path found and ``same_pairs`` contains every
    unordered pair of images belonging to the same person — so an ID folder
    with more than two images contributes all of its two-image combinations.
    """
    all_images = []
    same_pairs = []
    for person in os.listdir(dataset):
        person_dir = os.path.join(dataset, person)
        if not os.path.isdir(person_dir):
            continue
        person_images = []
        for image in os.listdir(person_dir):
            # Skip non-image files that may sit inside an ID folder.
            if not image.lower().endswith(('jpg', 'jpeg', 'png')):
                continue
            image_path = os.path.join(person_dir, image)
            all_images.append(image_path)
            person_images.append(image_path)
        same_pairs.extend(itertools.combinations(person_images, 2))
    return all_images, same_pairs


def _sample_different_pairs(all_images, count):
    """Randomly draw *count* unique image pairs from different persons.

    A pair (a, b) is accepted only when the two images live in different
    person directories and neither (a, b) nor (b, a) was drawn before.
    """
    seen = set()
    pairs = []
    while len(pairs) < count:
        # random.sample picks two distinct entries; no per-iteration
        # re-seeding or full-list shuffling is needed.
        first, second = random.sample(all_images, 2)
        if os.path.dirname(first) == os.path.dirname(second):
            continue  # same person -> not a "different" pair
        if (first, second) in seen:
            continue
        seen.add((first, second))
        seen.add((second, first))
        pairs.append((first, second))
    return pairs


def main(argv=None):
    """Write a pairs file: positive pairs (label 1) followed by the same
    number of negative pairs (label 0), one ``path path label`` per line."""
    parser = argparse.ArgumentParser(description='Generate image pairs.')
    parser.add_argument('--dataset', default='G:/myFaceData/myvaldata/imgs/',
                        type=str, help='Dataset path.')
    parser.add_argument('--output', default='G:/myFaceData/myvaldata/pairs1.txt',
                        type=str, help='Output path.')
    parser.add_argument('--same', default=5000, type=int,
                        help="Number of same pairs for each person.")
    args = parser.parse_args(argv)
    assert args.dataset and args.output, \
        'Dataset and output should be defined.'

    all_images, same_pairs = _collect_images(args.dataset)
    # Never request more positive pairs than actually exist
    # (the original crashed in random.sample when --same was too large).
    n_same = min(args.same, len(same_pairs))
    chosen_same = random.sample(same_pairs, n_same)
    print(f"Will generate {n_same} different pairs.")
    different_pairs = _sample_different_pairs(all_images, n_same)

    # Use a context manager so the output file is always closed.
    with open(args.output, 'w') as out:
        for first, second in chosen_same:
            out.write(f'{first} {second} 1\n')
        for first, second in different_pairs:
            out.write(f'{first} {second} 0\n')


if __name__ == "__main__":
    main()
```
**二、bin 文件制作**
同样,网上找到的代码跑不通,我自己修改了一些内容。
```python
#coding:utf-8
import mxnet as mx
from mxnet import ndarray as nd
import argparse
import pickle
import sys
import os
import numpy as np
import pdb
import matplotlib.pyplot as plt
def read_pairs(pairs_filename):
    """Read the pairs file and return its lines as a numpy array.

    Each line is stripped and split on ',' — the pairs file produced by the
    generation script normally contains no commas, so every row ends up as a
    one-element list holding the whole 'path0 path1 label' line.
    """
    with open(pairs_filename, 'r') as handle:
        rows = [raw.strip().split(',') for raw in handle]
    return np.array(rows)
def get_paths(pairs, same_pairs):
    """Resolve pair lines into a flat path list and per-pair labels.

    Args:
        pairs: iterable of rows whose first element is a
            'path0 path1 label' string (as returned by read_pairs).
        same_pairs: number of leading rows that are "same person" pairs;
            rows 1..same_pairs get label True, the rest False.

    Returns:
        (path_list, issame_list): path_list holds path0, path1 for every
        pair whose files both exist; issame_list holds one bool per kept
        pair. Pairs with a missing file are reported and skipped.
    """
    nrof_skipped_pairs = 0
    path_list = []
    issame_list = []
    cnt = 1
    for pair in pairs:
        fields = pair[0].split(' ')
        path0 = fields[0]
        path1 = fields[1]
        # BUG FIX: the original tested `cnt < same_pairs`, which mislabelled
        # the last "same" pair as different; <= includes pair #same_pairs.
        issame = cnt <= same_pairs
        if os.path.exists(path0) and os.path.exists(path1):  # Only add the pair if both paths exist
            path_list += (path0, path1)
            issame_list.append(issame)
        else:
            print('not exists', path0, path1)
            nrof_skipped_pairs += 1
        cnt += 1
    if nrof_skipped_pairs > 0:
        print('Skipped %d image pairs' % nrof_skipped_pairs)
    return path_list, issame_list
def main(argv=None):
    """Pack the image pairs listed in a pairs file into a pickled .bin file.

    The output pickle is ``(img_bins, issame_list)`` where ``img_bins`` holds
    the raw (still encoded) bytes of every image, two entries per pair, and
    ``issame_list`` holds one boolean label per pair.
    """
    parser = argparse.ArgumentParser(description='Package images')
    # general
    parser.add_argument('--data-dir', default='G:/myFaceData/myvaldata/imgs', help='')
    parser.add_argument('--image-size', type=str, default='112,112', help='')
    parser.add_argument('--output', default='G:/myFaceData/myvaldata/myval.bin',
                        help='path to save.')
    parser.add_argument('--txtfile', default='G:/myFaceData/myvaldata/pairs.txt',
                        help='txtfile path.')
    args = parser.parse_args(argv)

    # Parsed but currently unused by the packing logic; kept so the CLI
    # interface stays unchanged.
    image_size = [int(x) for x in args.image_size.split(',')]

    img_pairs = read_pairs(args.txtfile)
    # 5000 is the number of "same" pairs in the txt file; replace it with
    # the count actually produced by the pair-generation script.
    img_paths, issame_list = get_paths(img_pairs, 5000)

    img_bins = []
    for path in img_paths:
        with open(path, 'rb') as fin:
            img_bins.append(fin.read())

    with open(args.output, 'wb') as f:
        pickle.dump((img_bins, issame_list), f, protocol=pickle.HIGHEST_PROTOCOL)


if __name__ == '__main__':
    main()
```
|