Mirror of https://github.com/python/cpython.git (synced 2025-07-29 14:15:07 +00:00)
Issue #4751: For hashlib algorithms provided by OpenSSL, the Python
GIL is now released during computation on data lengths >= 2048 bytes.
parent 342c8db859
commit d02eedacab
4 changed files with 162 additions and 53 deletions
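
Why this matters: with the GIL released around large OpenSSL digest updates, threads hashing separate buffers can actually run in parallel on multiple cores instead of serializing on the interpreter lock. A minimal sketch of how one might observe the effect; the 2048-byte threshold comes from the commit message above, while the benchmark itself is illustrative and not part of this commit:

import hashlib
import threading
import time

data = b'x' * (64 * 1024 * 1024)  # far above the 2048-byte threshold

def hash_buffer():
    h = hashlib.sha1()
    h.update(data)  # large update: the GIL is released while OpenSSL computes
    h.hexdigest()

start = time.time()
threads = [threading.Thread(target=hash_buffer) for _ in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(time.time() - start)  # before this change, the four hashes serialize
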
Lib/test/test_hashlib.py
@@ -2,11 +2,16 @@
 #
 # $Id$
 #
-# Copyright (C) 2005   Gregory P. Smith (greg@krypto.org)
+# Copyright (C) 2005-2009   Gregory P. Smith (greg@krypto.org)
 # Licensed to PSF under a Contributor Agreement.
 #
 
 import hashlib
+import StringIO
+try:
+    import threading
+except ImportError:
+    threading = None
 import unittest
 from test import test_support
 from test.test_support import _4G, precisionbigmemtest
@@ -61,10 +66,10 @@ class HashLibTestCase(unittest.TestCase):
     def check(self, name, data, digest):
         # test the direct constructors
         computed = getattr(hashlib, name)(data).hexdigest()
-        self.assert_(computed == digest)
+        self.assertEqual(computed, digest)
         # test the general new() interface
         computed = hashlib.new(name, data).hexdigest()
-        self.assert_(computed == digest)
+        self.assertEqual(computed, digest)
 
     def check_no_unicode(self, algorithm_name):
         # Unicode objects are not allowed as input.
@@ -211,6 +216,44 @@ class HashLibTestCase(unittest.TestCase):
             "e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973eb"+
             "de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b")
 
+    def test_threaded_hashing(self):
+        if not threading:
+            raise unittest.SkipTest('No threading module.')
+
+        # Updating the same hash object from several threads at once
+        # using data chunk sizes containing the same byte sequences.
+        #
+        # If the internal locks are working to prevent multiple
+        # updates on the same object from running at once, the resulting
+        # hash will be the same as doing it single threaded upfront.
+        hasher = hashlib.sha1()
+        num_threads = 5
+        smallest_data = 'swineflu'
+        data = smallest_data*200000
+        expected_hash = hashlib.sha1(data*num_threads).hexdigest()
+
+        def hash_in_chunks(chunk_size, event):
+            index = 0
+            while index < len(data):
+                hasher.update(data[index:index+chunk_size])
+                index += chunk_size
+            event.set()
+
+        events = []
+        for threadnum in xrange(num_threads):
+            chunk_size = len(data) // (10**threadnum)
+            assert chunk_size > 0
+            assert chunk_size % len(smallest_data) == 0
+            event = threading.Event()
+            events.append(event)
+            threading.Thread(target=hash_in_chunks,
+                             args=(chunk_size, event)).start()
+
+        for event in events:
+            event.wait()
+
+        self.assertEqual(expected_hash, hasher.hexdigest())
+
 def test_main():
     test_support.run_unittest(HashLibTestCase)
 
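
A note on the new test: test_threaded_hashing can assert one exact digest regardless of thread scheduling because every chunk passed to update() is a whole number of repetitions of smallest_data, so as long as each update() call is applied atomically (which the hash object's internal lock guarantees), any serialized ordering of the chunks concatenates to the same byte stream as data*num_threads. A single-threaded sketch of that invariant, illustrative and not part of the commit:

import hashlib
import random

smallest_data = b'swineflu'
data = smallest_data * 200000
num_threads = 5
expected = hashlib.sha1(data * num_threads).hexdigest()

# Gather every thread's chunks, then apply them in an arbitrary order to
# mimic one possible serialization of atomic update() calls.
chunks = []
for threadnum in range(num_threads):
    chunk_size = len(data) // (10 ** threadnum)
    chunks.extend(data[i:i + chunk_size]
                  for i in range(0, len(data), chunk_size))
random.shuffle(chunks)

h = hashlib.sha1()
for chunk in chunks:
    h.update(chunk)
assert h.hexdigest() == expected  # holds for any ordering of the chunks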