python-s3transfer/s3transfer-remove_online_te...

diff -Naur s3transfer-0.0.1.orig/tests/integration/test_s3transfer.py s3transfer-0.0.1/tests/integration/test_s3transfer.py
--- s3transfer-0.0.1.orig/tests/integration/test_s3transfer.py 2016-02-23 14:59:22.338749531 +0100
+++ s3transfer-0.0.1/tests/integration/test_s3transfer.py 2016-02-23 15:01:05.566034716 +0100
@@ -102,251 +102,3 @@
f.full_path('foo/bar.txt') -> /tmp/asdfasd/foo/bar.txt
"""
return os.path.join(self.rootdir, filename)
-
-
-class TestS3Transfers(unittest.TestCase):
- """Tests for the high level s3transfer module."""
-
- @classmethod
- def setUpClass(cls):
- cls.region = 'us-west-2'
- cls.session = botocore.session.get_session()
- cls.client = cls.session.create_client('s3', cls.region)
- cls.bucket_name = random_bucket_name()
- cls.client.create_bucket(
- Bucket=cls.bucket_name,
- CreateBucketConfiguration={'LocationConstraint': cls.region})
-
- def setUp(self):
- self.files = FileCreator()
-
- def tearDown(self):
- self.files.remove_all()
-
- @classmethod
- def tearDownClass(cls):
- cls.client.delete_bucket(Bucket=cls.bucket_name)
-
- def delete_object(self, key):
- self.client.delete_object(
- Bucket=self.bucket_name,
- Key=key)
-
- def object_exists(self, key):
- self.client.head_object(Bucket=self.bucket_name,
- Key=key)
- return True
-
- def create_s3_transfer(self, config=None):
- return s3transfer.S3Transfer(self.client,
- config=config)
-
- def assert_has_public_read_acl(self, response):
- grants = response['Grants']
- public_read = [g['Grantee'].get('URI', '') for g in grants
- if g['Permission'] == 'READ']
- self.assertIn('groups/global/AllUsers', public_read[0])
-
- def test_upload_below_threshold(self):
- config = s3transfer.TransferConfig(
- multipart_threshold=2 * 1024 * 1024)
- transfer = self.create_s3_transfer(config)
- filename = self.files.create_file_with_size(
- 'foo.txt', filesize=1024 * 1024)
- transfer.upload_file(filename, self.bucket_name,
- 'foo.txt')
- self.addCleanup(self.delete_object, 'foo.txt')
-
- self.assertTrue(self.object_exists('foo.txt'))
-
- def test_upload_above_threshold(self):
- config = s3transfer.TransferConfig(
- multipart_threshold=2 * 1024 * 1024)
- transfer = self.create_s3_transfer(config)
- filename = self.files.create_file_with_size(
- '20mb.txt', filesize=20 * 1024 * 1024)
- transfer.upload_file(filename, self.bucket_name,
- '20mb.txt')
- self.addCleanup(self.delete_object, '20mb.txt')
- self.assertTrue(self.object_exists('20mb.txt'))
-
- def test_upload_file_above_threshold_with_acl(self):
- config = s3transfer.TransferConfig(
- multipart_threshold=5 * 1024 * 1024)
- transfer = self.create_s3_transfer(config)
- filename = self.files.create_file_with_size(
- '6mb.txt', filesize=6 * 1024 * 1024)
- extra_args = {'ACL': 'public-read'}
- transfer.upload_file(filename, self.bucket_name,
- '6mb.txt', extra_args=extra_args)
- self.addCleanup(self.delete_object, '6mb.txt')
-
- self.assertTrue(self.object_exists('6mb.txt'))
- response = self.client.get_object_acl(
- Bucket=self.bucket_name, Key='6mb.txt')
- self.assert_has_public_read_acl(response)
-
- def test_upload_file_above_threshold_with_ssec(self):
- key_bytes = os.urandom(32)
- extra_args = {
- 'SSECustomerKey': key_bytes,
- 'SSECustomerAlgorithm': 'AES256',
- }
- config = s3transfer.TransferConfig(
- multipart_threshold=5 * 1024 * 1024)
- transfer = self.create_s3_transfer(config)
- filename = self.files.create_file_with_size(
- '6mb.txt', filesize=6 * 1024 * 1024)
- transfer.upload_file(filename, self.bucket_name,
- '6mb.txt', extra_args=extra_args)
- self.addCleanup(self.delete_object, '6mb.txt')
- # A head object will fail if it has a customer key
- # associated with it and it's not provided in the HeadObject
- # request so we can use this to verify our functionality.
- response = self.client.head_object(
- Bucket=self.bucket_name,
- Key='6mb.txt', **extra_args)
- self.assertEqual(response['SSECustomerAlgorithm'], 'AES256')
-
- def test_progress_callback_on_upload(self):
- self.amount_seen = 0
- lock = threading.Lock()
-
- def progress_callback(amount):
- with lock:
- self.amount_seen += amount
-
- transfer = self.create_s3_transfer()
- filename = self.files.create_file_with_size(
- '20mb.txt', filesize=20 * 1024 * 1024)
- transfer.upload_file(filename, self.bucket_name,
- '20mb.txt', callback=progress_callback)
- self.addCleanup(self.delete_object, '20mb.txt')
-
- # The callback should have been called enough times such that
- # the total amount of bytes we've seen (via the "amount"
- # arg to the callback function) should be the size
- # of the file we uploaded.
- self.assertEqual(self.amount_seen, 20 * 1024 * 1024)
-
- def test_callback_called_once_with_sigv4(self):
- # Verify #98, where the callback was being invoked
- # twice when using signature version 4.
- self.amount_seen = 0
- lock = threading.Lock()
-
- def progress_callback(amount):
- with lock:
- self.amount_seen += amount
-
- client = self.session.create_client(
- 's3', self.region,
- config=Config(signature_version='s3v4'))
- transfer = s3transfer.S3Transfer(client)
- filename = self.files.create_file_with_size(
- '10mb.txt', filesize=10 * 1024 * 1024)
- transfer.upload_file(filename, self.bucket_name,
- '10mb.txt', callback=progress_callback)
- self.addCleanup(self.delete_object, '10mb.txt')
-
- self.assertEqual(self.amount_seen, 10 * 1024 * 1024)
-
- def test_can_send_extra_params_on_upload(self):
- transfer = self.create_s3_transfer()
- filename = self.files.create_file_with_size('foo.txt', filesize=1024)
- transfer.upload_file(filename, self.bucket_name,
- 'foo.txt', extra_args={'ACL': 'public-read'})
- self.addCleanup(self.delete_object, 'foo.txt')
-
- response = self.client.get_object_acl(
- Bucket=self.bucket_name, Key='foo.txt')
- self.assert_has_public_read_acl(response)
-
- def test_can_configure_threshold(self):
- config = s3transfer.TransferConfig(
- multipart_threshold=6 * 1024 * 1024
- )
- transfer = self.create_s3_transfer(config)
- filename = self.files.create_file_with_size(
- 'foo.txt', filesize=8 * 1024 * 1024)
- transfer.upload_file(filename, self.bucket_name,
- 'foo.txt')
- self.addCleanup(self.delete_object, 'foo.txt')
-
- self.assertTrue(self.object_exists('foo.txt'))
-
- def test_can_send_extra_params_on_download(self):
- # We're picking the customer provided sse feature
- # of S3 to test the extra_args functionality of
- # S3.
- key_bytes = os.urandom(32)
- extra_args = {
- 'SSECustomerKey': key_bytes,
- 'SSECustomerAlgorithm': 'AES256',
- }
- self.client.put_object(Bucket=self.bucket_name,
- Key='foo.txt',
- Body=b'hello world',
- **extra_args)
- self.addCleanup(self.delete_object, 'foo.txt')
- transfer = self.create_s3_transfer()
-
- download_path = os.path.join(self.files.rootdir, 'downloaded.txt')
- transfer.download_file(self.bucket_name, 'foo.txt',
- download_path, extra_args=extra_args)
- with open(download_path, 'rb') as f:
- self.assertEqual(f.read(), b'hello world')
-
- def test_progress_callback_on_download(self):
- self.amount_seen = 0
- lock = threading.Lock()
-
- def progress_callback(amount):
- with lock:
- self.amount_seen += amount
-
- transfer = self.create_s3_transfer()
- filename = self.files.create_file_with_size(
- '20mb.txt', filesize=20 * 1024 * 1024)
- with open(filename, 'rb') as f:
- self.client.put_object(Bucket=self.bucket_name,
- Key='20mb.txt', Body=f)
- self.addCleanup(self.delete_object, '20mb.txt')
-
- download_path = os.path.join(self.files.rootdir, 'downloaded.txt')
- transfer.download_file(self.bucket_name, '20mb.txt',
- download_path, callback=progress_callback)
-
- self.assertEqual(self.amount_seen, 20 * 1024 * 1024)
-
- def test_download_below_threshold(self):
- transfer = self.create_s3_transfer()
-
- filename = self.files.create_file_with_size(
- 'foo.txt', filesize=1024 * 1024)
- with open(filename, 'rb') as f:
- self.client.put_object(Bucket=self.bucket_name,
- Key='foo.txt',
- Body=f)
- self.addCleanup(self.delete_object, 'foo.txt')
-
- download_path = os.path.join(self.files.rootdir, 'downloaded.txt')
- transfer.download_file(self.bucket_name, 'foo.txt',
- download_path)
- assert_files_equal(filename, download_path)
-
- def test_download_above_threshold(self):
- transfer = self.create_s3_transfer()
-
- filename = self.files.create_file_with_size(
- 'foo.txt', filesize=20 * 1024 * 1024)
- with open(filename, 'rb') as f:
- self.client.put_object(Bucket=self.bucket_name,
- Key='foo.txt',
- Body=f)
- self.addCleanup(self.delete_object, 'foo.txt')
-
- download_path = os.path.join(self.files.rootdir, 'downloaded.txt')
- transfer.download_file(self.bucket_name, 'foo.txt',
- download_path)
- assert_files_equal(filename, download_path)
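
For context, the tests removed by this patch exercise the high-level s3transfer.S3Transfer API against a live S3 bucket, which is why they cannot run in an offline build environment. Below is a minimal sketch (not part of the patch) of the usage pattern those tests cover, assuming s3transfer 0.0.1 with botocore, valid AWS credentials, and placeholder bucket/file names:

# Minimal sketch of the S3Transfer usage pattern the removed tests cover;
# the bucket name and file paths here are placeholders.
import botocore.session
import s3transfer

session = botocore.session.get_session()
client = session.create_client('s3', 'us-west-2')

# Files larger than the multipart threshold are transferred as multipart uploads.
config = s3transfer.TransferConfig(multipart_threshold=8 * 1024 * 1024)
transfer = s3transfer.S3Transfer(client, config=config)

def progress_callback(amount):
    # Called with the number of bytes transferred in each chunk; summing the
    # amounts gives the total transferred, which the removed progress tests
    # compared against the file size.
    print('transferred %d bytes' % amount)

transfer.upload_file('/tmp/20mb.txt', 'example-bucket', '20mb.txt',
                     callback=progress_callback)
transfer.download_file('example-bucket', '20mb.txt', '/tmp/downloaded.txt',
                       callback=progress_callback)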