@@ -66,35 +66,29 @@ def main():
     entries = db.source_hash.dbsize()
 
     print_bold('copy/move all files')
-    progress_status = {'current': 0, 'sum': entries / 100}
-    iter_threaded(db_ops.iter_db, copy_move_file, progress_status=progress_status, db=db.hash_meta,
+    iter_threaded(db_ops.iter_db, copy_move_file, progress_max=entries, db=db.hash_meta,
                   iter_args=(db.source_hash,), num_threads=args.threads, size_queue=args.queue_size, handler_args=(dest_dir, move_file, ))
 
     print_bold('parse meta data')
-    progress_status = {'current': 0, 'sum': entries / 100}
-    iter_threaded(db_ops.iter_db, get_meta_data, progress_status=progress_status, db=db.hash_meta,
+    iter_threaded(db_ops.iter_db, get_meta_data, progress_max=entries, db=db.hash_meta,
                   iter_args=(db.source_hash,), num_threads=args.threads, size_queue=args.queue_size, handler_args=(dest_dir, args.max_diff, ))
 
     print_bold('create date links')
-    progress_status = {'current': 0, 'sum': entries / 100}
-    iter_threaded(db_ops.iter_db, create_date_link, progress_status=progress_status, db=db.hash_datename,
+    iter_threaded(db_ops.iter_db, create_date_link, progress_max=entries, db=db.hash_datename,
                   iter_args=(db.source_hash,), num_threads=args.threads, size_queue=args.queue_size, handler_args=(dest_dir, db.hash_meta, ))
 
     print_bold('create by links')
-    progress_status = {'current': 0, 'sum': entries / 100}
-    iter_threaded(db_ops.iter_db, create_by_link, progress_status=progress_status,
+    iter_threaded(db_ops.iter_db, create_by_link, progress_max=entries,
                   iter_args=(db.source_hash,), num_threads=args.threads, size_queue=args.queue_size, handler_args=(dest_dir, db.hash_meta, db.hash_datename, ))
 
     print_bold('create location links')
-    progress_status = {'current': 0, 'sum': entries / 100}
-    iter_threaded(db_ops.iter_db, create_by_location, progress_status=progress_status,
+    iter_threaded(db_ops.iter_db, create_by_location, progress_max=entries,
                   iter_args=(db.source_hash,), num_threads=args.threads, size_queue=args.queue_size, handler_args=(dest_dir, db.hash_meta, db.hash_datename, ))
 
     if not args.skip_faces:
         print_bold('detect faces')
-        progress_status = {'current': 0, 'sum': entries / 100}
-        iter_threaded(db_ops.iter_db, detect_faces, progress_status=progress_status, db=db.hash_face,
-                      iter_args=(db.source_hash,), num_threads=args.threads, size_queue=args.queue_size, handler_args=(dest_dir, db.hash_datename, ))
+        iter_threaded(db_ops.iter_db, detect_faces, progress_max=entries, db=db.hash_face,
+                      iter_args=(db.source_hash,), num_threads=args.threads, size_queue=args.queue_size, handler_args=(dest_dir, db.hash_face, db.hash_datename, ))
 
     print('\n\033[1;32mfinished \033[0;32mprocessed {} files\033[0m\n\033[0m'.format(entries))
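
The refactor above drops the per-step progress_status = {'current': 0, 'sum': entries / 100} dicts and instead passes a single progress_max=entries keyword to iter_threaded. The implementation of iter_threaded is not part of this hunk, so the following is only a minimal sketch under assumptions: that it drains an iterator through a bounded queue into a pool of worker threads, and that extra keyword arguments such as db= are forwarded to the iterator. The signature and names here are illustrative, not the project's actual code.

# Sketch only: iter_threaded is not shown in this hunk. This assumes a
# bounded-queue worker pool that reports progress against progress_max.
import queue
import threading


def iter_threaded(iter_func, handler, progress_max=None, num_threads=4,
                  size_queue=100, iter_args=(), handler_args=(), **iter_kwargs):
    work = queue.Queue(maxsize=size_queue)   # bounded so the producer cannot race far ahead
    done = 0
    lock = threading.Lock()

    def worker():
        nonlocal done
        while True:
            item = work.get()
            if item is None:                 # sentinel: no more work for this thread
                work.task_done()
                return
            try:
                handler(item, *handler_args)
            finally:
                with lock:
                    done += 1
                    if progress_max:
                        # progress as a percentage of the known total
                        print('\r{:.1f}%'.format(100.0 * done / progress_max), end='')
                work.task_done()

    threads = [threading.Thread(target=worker, daemon=True) for _ in range(num_threads)]
    for t in threads:
        t.start()
    for item in iter_func(*iter_args, **iter_kwargs):   # assumption: db= etc. go to the iterator
        work.put(item)
    for _ in threads:
        work.put(None)                       # one sentinel per worker
    work.join()
    if progress_max:
        print()

With a shape like this, each step in main() can reuse the same entries total for its progress display instead of rebuilding a progress dict before every call.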