Commit 71f75f4

Fix progress status, check for already processed face detection
1 parent 3c52516 commit 71f75f4
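
The change replaces the per-stage progress_status dict with a progress_max count backed by a shared multiprocessing.Value counter, and lets detect_faces() reuse summaries already stored in db.hash_face instead of re-running detection on files it has seen before.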

File tree: main_ops.py, mt_ops.py, pic_sort.py (3 files changed, +23 −23 lines)


main_ops.py

Lines changed: 4 additions & 2 deletions
@@ -156,15 +156,17 @@ def create_by_location(entry, dest_dir, db_meta, db_hash_datename):
         return '{} has location data'.format(basename), None, None
 
 
-def detect_faces(entry, dest_dir, db_hash_datename):
+def detect_faces(entry, dest_dir, db_hash_face, db_hash_datename):
     source = entry[0]
     sha512 = entry[1]
 
     basename = os.path.basename(source)
     hashed_path = os.path.join(dest_dir, 'hashed/raw', sha512)
     date_basename = db_ops.get(db_hash_datename, sha512)
 
-    summary = face_ops.detect_faces(source)
+    summary = db_ops.get(db_hash_face, sha512)
+    if not isinstance(summary, list):
+        summary = face_ops.detect_faces(source)
 
     ret_str = ''
     if len(summary) >= 1:
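
The new lookup short-circuits detection for files that already have a stored summary. A minimal sketch of the same check-before-compute pattern, assuming a db handle whose get() returns the unpickled list for processed keys and a non-list sentinel (such as None) otherwise; get_or_detect and detect_fn are hypothetical stand-ins, not part of the repository:

    def get_or_detect(db, sha512, detect_fn, source):
        # Reuse the cached summary when this hash was already processed;
        # anything that is not a list means "not processed yet".
        cached = db.get(sha512)
        if isinstance(cached, list):
            return cached
        # First visit: run the expensive detector and return a fresh summary.
        return detect_fn(source)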

mt_ops.py

Lines changed: 12 additions & 8 deletions
@@ -2,34 +2,38 @@
 from basic_ops import *
 
 
-def handler(handler_function, handler_queue, extra_args, progress_status, db):
+def handler(handler_function, handler_queue, extra_args, progress_max, progress_current, db):
     process_id = multiprocessing.current_process().name
     while True:
         entry = handler_queue.get()
         try:
             log_output, k, v = handler_function(entry, *extra_args)
             if k and db:
-                if v:
+                if v != None:
                     db.set(k, pickle.dumps(v))
                 else:
                     db.append(k, b'')
         except Exception:
             print('Failed to handle {}'.format(entry))
             traceback.print_exc(file=sys.stdout)
             os._exit(10)
-        if progress_status:
-            progress_status['current'] += 1
-            stdout('{:6.2f}% Process {}: {}'.format(progress_status['current']/progress_status['sum'], process_id, log_output))
+        if progress_max:
+            progress_current.value += 1
+            stdout('{:6.2f}% Process {}: {}'.format(progress_current.value/progress_max, process_id, log_output))
         else:
             stdout('Process {}: {}'.format(process_id, log_output))
         handler_queue.task_done()
 
 
-def iter_threaded(iter_funtion, handler_function, db=None, handler_args=(), num_threads=8, size_queue=10, iter_args=(), progress_status=None):
+def iter_threaded(iter_funtion, handler_function, db=None, handler_args=(), num_threads=8, size_queue=10, iter_args=(), progress_max=None):
     processes = []
+    progress_current = None
+    if progress_max:
+        progress_max /= 100
+        progress_current = multiprocessing.Value('i', 0)
     handler_queue = multiprocessing.JoinableQueue(size_queue)
     for i in range(0, num_threads):
-        process = multiprocessing.Process(name = '{}'.format(i), target=handler, args=(handler_function, handler_queue, handler_args, progress_status, db))
+        process = multiprocessing.Process(name = '{}'.format(i), target=handler, args=(handler_function, handler_queue, handler_args, progress_max, progress_current, db))
         process.start()
         processes.append(process)
     for request in iter_funtion(*iter_args):
@@ -39,6 +43,6 @@ def iter_threaded(iter_funtion, handler_function, db=None, handler_args=(), num_
     for process in processes:
         process.terminate()
         process.join()
-    if progress_status:
+    if progress_max:
         sys.stdout.write('\r100.00%')
     print('\n')
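
The move from a plain dict to multiprocessing.Value is what makes the counter work at all: each worker started with multiprocessing.Process gets its own copy of ordinary Python objects, so increments to a per-process dict are never seen by the other workers, while a Value lives in shared memory. A self-contained sketch of the idea (note that the committed code increments progress_current.value without taking the lock, so its displayed percentages can be slightly off under contention):

    import multiprocessing

    def worker(counter, n):
        for _ in range(n):
            with counter.get_lock():   # serialize the read-modify-write
                counter.value += 1

    if __name__ == '__main__':
        done = multiprocessing.Value('i', 0)   # int counter in shared memory
        procs = [multiprocessing.Process(target=worker, args=(done, 1000))
                 for _ in range(4)]
        for p in procs:
            p.start()
        for p in procs:
            p.join()
        print(done.value)                      # 4000: no lost updates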

pic_sort.py

Lines changed: 7 additions & 13 deletions
@@ -66,35 +66,29 @@ def main():
     entries = db.source_hash.dbsize()
 
     print_bold('copy/move all files')
-    progress_status = {'current': 0, 'sum': entries/100}
-    iter_threaded(db_ops.iter_db, copy_move_file, progress_status = progress_status, db=db.hash_meta,
+    iter_threaded(db_ops.iter_db, copy_move_file, progress_max = entries, db=db.hash_meta,
                   iter_args=(db.source_hash,), num_threads = args.threads, size_queue = args.queue_size, handler_args = (dest_dir, move_file, ))
 
     print_bold('parse meta data')
-    progress_status = {'current': 0, 'sum': entries/100}
-    iter_threaded(db_ops.iter_db, get_meta_data, progress_status = progress_status, db=db.hash_meta,
+    iter_threaded(db_ops.iter_db, get_meta_data, progress_max = entries, db=db.hash_meta,
                   iter_args=(db.source_hash,), num_threads = args.threads, size_queue = args.queue_size, handler_args = (dest_dir, args.max_diff, ))
 
     print_bold('create date links')
-    progress_status = {'current': 0, 'sum': entries/100}
-    iter_threaded(db_ops.iter_db, create_date_link, progress_status = progress_status, db=db.hash_datename,
+    iter_threaded(db_ops.iter_db, create_date_link, progress_max = entries, db=db.hash_datename,
                   iter_args=(db.source_hash,), num_threads = args.threads, size_queue = args.queue_size, handler_args = (dest_dir, db.hash_meta, ))
 
     print_bold('create by links')
-    progress_status = {'current': 0, 'sum': entries/100}
-    iter_threaded(db_ops.iter_db, create_by_link, progress_status = progress_status,
+    iter_threaded(db_ops.iter_db, create_by_link, progress_max = entries,
                   iter_args=(db.source_hash,), num_threads = args.threads, size_queue = args.queue_size, handler_args = (dest_dir, db.hash_meta, db.hash_datename, ))
 
     print_bold('create location links')
-    progress_status = {'current': 0, 'sum': entries/100}
-    iter_threaded(db_ops.iter_db, create_by_location, progress_status = progress_status,
+    iter_threaded(db_ops.iter_db, create_by_location, progress_max = entries,
                   iter_args=(db.source_hash,), num_threads = args.threads, size_queue = args.queue_size, handler_args = (dest_dir, db.hash_meta, db.hash_datename, ))
 
     if not args.skip_faces:
         print_bold('detect faces')
-        progress_status = {'current': 0, 'sum': entries/100}
-        iter_threaded(db_ops.iter_db, detect_faces, progress_status = progress_status, db=db.hash_face,
-                      iter_args=(db.source_hash,), num_threads = args.threads, size_queue = args.queue_size, handler_args = (dest_dir, db.hash_datename, ))
+        iter_threaded(db_ops.iter_db, detect_faces, progress_max = entries, db=db.hash_face,
+                      iter_args=(db.source_hash,), num_threads = args.threads, size_queue = args.queue_size, handler_args = (dest_dir, db.hash_face, db.hash_datename, ))
 
     print('\n finished processed {} files\n'.format(entries))
 
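
Callers now pass the raw entry count and iter_threaded does the percentage math itself: dividing the total by 100 once up front means progress_current / progress_max already yields percent. A worked example with hypothetical numbers:

    entries = 2000                   # total files, as returned by dbsize()
    progress_max = entries / 100     # 20.0, computed once inside iter_threaded
    progress_current = 500           # items completed across all workers
    print('{:6.2f}%'.format(progress_current / progress_max))   # ' 25.00%'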
