Use timeout to keep the file progress printing smooth
author krj <krj@f5534014-38df-0310-8fa8-9805f1628bb7>
Fri, 25 Sep 2009 19:41:54 +0000 (19:41 +0000)
committer krj <krj@f5534014-38df-0310-8fa8-9805f1628bb7>
Fri, 25 Sep 2009 19:41:54 +0000 (19:41 +0000)
git-svn-id: http://anonsvn.wireshark.org/wireshark/trunk@30153 f5534014-38df-0310-8fa8-9805f1628bb7

tools/indexcap.py

index f9ff217c7a65dd37f34d7b2830fbb1b280d4676a..4fa615e184eadb1ec002cda25a3a4f02d243f819 100644
@@ -26,7 +26,7 @@
 #
 
 from optparse import OptionParser
-from multiprocessing import Process, Pool
+import multiprocessing
 import sys
 import os
 import subprocess
@@ -125,16 +125,22 @@ def main():
     cap_files = find_capture_files(paths, cap_hash)
     cap_files.sort()
     print len(cap_files), "total files,",
+    options.max_files = min(options.max_files, len(cap_files))
     cap_files = cap_files[:options.max_files]
     print len(cap_files), "indexable files"
     print "\n"
 
-    pool = Pool(options.num_procs)
+    pool = multiprocessing.Pool(options.num_procs)
     results = [pool.apply_async(process_capture_file, [tshark, file]) for file in cap_files]
     cur_item_num = 0
     for result in results:
         cur_item_num += 1
-        file_result = result.get()
+        try:
+            file_result = result.get(1)
+        except multiprocessing.TimeoutError:
+            cur_item_num -= 1
+            continue
+
         if file_result is None:
             continue
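
For reference, below is a minimal standalone sketch of the pattern this change introduces: each AsyncResult returned by pool.apply_async() is polled with a short get() timeout, so the progress loop keeps printing instead of blocking on one slow capture file. The slow_task() helper, the pool size, and the re-queuing of timed-out results until they complete are illustrative assumptions and not part of indexcap.py; the hunk above only shows the skip-and-continue within a single pass over the results.

    # Sketch of timeout-based polling of Pool results (assumed details marked above).
    import multiprocessing
    import time

    def slow_task(n):
        time.sleep(n % 3)          # simulate uneven per-file processing time
        return n * n

    def main():
        pool = multiprocessing.Pool(4)
        pending = [pool.apply_async(slow_task, [n]) for n in range(10)]
        done = 0
        total = len(pending)
        while pending:
            still_pending = []
            for result in pending:
                try:
                    value = result.get(1)              # wait at most 1 second per result
                except multiprocessing.TimeoutError:
                    still_pending.append(result)       # not ready yet; poll again next pass
                    continue
                done += 1
                print("%d/%d done, result=%d" % (done, total, value))
            pending = still_pending
        pool.close()
        pool.join()

    if __name__ == '__main__':
        main()

The short timeout bounds how long the loop waits on any one worker, which is what keeps the per-file progress output smooth when some captures take much longer to index than others.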