@@ -1590,67 +1590,46 @@ def test_presigned_post_policy(log_entry):
 
 def test_thread_safe(log_entry):
     """Test thread safety."""
-
-    # Create sha-sum value for the user provided
-    # source file, 'test_file'
-    test_file_sha_sum = _get_sha256sum(_LARGE_FILE)
-
-    # Get a unique bucket_name and object_name
     bucket_name = _gen_bucket_name()
     object_name = f"{uuid4()}"
-
     log_entry["args"] = {
         "bucket_name": bucket_name,
         "object_name": object_name,
     }
+    _CLIENT.make_bucket(bucket_name)
 
-    # A list of exceptions raised by get_object_and_check
-    # called in multiple threads.
+    test_file_sha256sum = _get_sha256sum(_LARGE_FILE)
     exceptions = []
 
-    # get_object_and_check() downloads an object, stores it in a file
-    # and then calculates its checksum. In case of mismatch, a new
-    # exception is generated and saved in exceptions.
     def get_object_and_check(index):
+        local_file = f"copied_file_{index}"
         try:
-            local_file = f"copied_file_{index}"
             _CLIENT.fget_object(bucket_name, object_name, local_file)
-            copied_file_sha_sum = _get_sha256sum(local_file)
-            # Compare sha-sum values of the source file and the copied one
-            if test_file_sha_sum != copied_file_sha_sum:
+            if _get_sha256sum(local_file) != test_file_sha256sum:
                 raise ValueError(
-                    'Sha-sum mismatch on multi-threaded put and '
-                    'get objects')
+                    "checksum mismatch on multi-threaded put/get objects")
         except Exception as exc:  # pylint: disable=broad-except
             exceptions.append(exc)
         finally:
-            # Remove downloaded file
             _ = os.path.isfile(local_file) and os.remove(local_file)
 
-    _CLIENT.make_bucket(bucket_name)
-    no_of_threads = 5
     try:
-        # Put/Upload 'no_of_threads' many objects
-        # simultaneously using multi-threading
-        for _ in range(no_of_threads):
+        thread_count = 5
+
+        # Start threads for put object.
+        for _ in range(thread_count):
             thread = Thread(target=_CLIENT.fput_object,
                             args=(bucket_name, object_name, _LARGE_FILE))
             thread.start()
             thread.join()
 
-        # Get/Download 'no_of_threads' many objects
-        # simultaneously using multi-threading
-        thread_list = []
-        for i in range(no_of_threads):
-            # Create dynamic/varying names for to be created threads
-            thread_name = f"thread_{i}"
-            vars()[thread_name] = Thread(
-                target=get_object_and_check, args=(i,))
-            vars()[thread_name].start()
-            thread_list.append(vars()[thread_name])
-
-        # Wait until all threads to finish
-        for thread in thread_list:
+        # Start threads for get object.
+        threads = []
+        for i in range(thread_count):
+            thread = Thread(target=get_object_and_check, args=(i,))
+            threads.append(thread)
+            thread.start()
+        for thread in threads:
             thread.join()
 
         if exceptions: