hasher = hashlib.sha1()
num_threads = 5
smallest_data = b'swineflu'
- data = smallest_data*200000
+ data = smallest_data * 200000
expected_hash = hashlib.sha1(data*num_threads).hexdigest()
- def hash_in_chunks(chunk_size, event):
+ def hash_in_chunks(chunk_size):
    index = 0
    while index < len(data):
-         hasher.update(data[index:index+chunk_size])
+         hasher.update(data[index:index + chunk_size])
        index += chunk_size
-     event.set()
- events = []
+ threads = []
for threadnum in range(num_threads):
-     chunk_size = len(data) // (10**threadnum)
+     chunk_size = len(data) // (10 ** threadnum)
    self.assertGreater(chunk_size, 0)
    self.assertEqual(chunk_size % len(smallest_data), 0)
-     event = threading.Event()
-     events.append(event)
-     threading.Thread(target=hash_in_chunks,
-                      args=(chunk_size, event)).start()
-
- for event in events:
-     event.wait()
+     thread = threading.Thread(target=hash_in_chunks,
+                               args=(chunk_size,))
+     threads.append(thread)
+
+ for thread in threads:
+     thread.start()
+ for thread in threads:
+     thread.join()
self.assertEqual(expected_hash, hasher.hexdigest())
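
For context, the hunk swaps the per-thread threading.Event handshake for Thread.join(), which already blocks until a thread's target returns. A minimal sketch of that join-based pattern in isolation (the names worker and results are illustrative only, not taken from the test):

import threading

# Each worker writes to its own slot, so the main thread can safely read
# the list once every join() has returned.
results = [None] * 5

def worker(i):
    results[i] = i * i

threads = [threading.Thread(target=worker, args=(i,)) for i in range(5)]
for thread in threads:
    thread.start()
for thread in threads:
    thread.join()  # returns once that thread's target has finished

assert results == [0, 1, 4, 9, 16]

This drops the Event bookkeeping and the separate wait() loop while preserving the guarantee the test relies on: every hasher.update() call has completed before the final hash comparison runs.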