From 98c0c0bca27d36258170904a2e7ddff6e53c1855 Mon Sep 17 00:00:00 2001
From: Fabio Alessandro Locati <fabio@locati.cc>
Date: Thu, 25 Feb 2016 00:05:38 +0100
Subject: [PATCH] 0.0.1-3 Improve spec file and make the patch way more usable

---
 python-s3transfer.spec               |  14 +-
 s3transfer-remove_online_tests.patch | 267 ++-------------------------
 2 files changed, 20 insertions(+), 261 deletions(-)

diff --git a/python-s3transfer.spec b/python-s3transfer.spec
index 508ed39..ace6afb 100644
--- a/python-s3transfer.spec
+++ b/python-s3transfer.spec
@@ -9,14 +9,11 @@
 %global with_python3 1
 %endif
 
-# https://github.com/boto/s3transfer/issues/8
-%global patched_tests 1
-
 %global pypi_name s3transfer
 
 Name:           python-%{pypi_name}
 Version:        0.0.1
-Release:        2%{?dist}
+Release:        3%{?dist}
 Summary:        An Amazon S3 Transfer Manager
 
 License:        ASL 2.0
@@ -25,11 +22,13 @@ URL:            https://github.com/boto/s3transfer
 # - https://github.com/boto/s3transfer/issues/6
 # - https://github.com/boto/s3transfer/issues/7
 Source0:        https://github.com/boto/%{pypi_name}/archive/%{version}.tar.gz
+# This patch is the result of https://github.com/boto/s3transfer/issues/8
 Patch0:         s3transfer-remove_online_tests.patch
 BuildArch:      noarch
 
 BuildRequires:  python2-devel
 BuildRequires:  python-setuptools
+# The following are for tests
 BuildRequires:  python-nose
 BuildRequires:  python-mock
 BuildRequires:  python-wheel
@@ -38,6 +37,7 @@ BuildRequires:  python2-botocore
 %if 0%{?with_python3} 
 BuildRequires:  python3-devel
 BuildRequires:  python3-setuptools
+# The following are for tests
 BuildRequires:  python3-nose
 BuildRequires:  python3-mock
 BuildRequires:  python3-wheel
@@ -68,9 +68,7 @@ S3transfer is a Python library for managing Amazon S3 transfers.
 
 %prep
 %setup -q -n %{pypi_name}-%{version}
-%if 0%{?patched_tests}
 %patch0 -p1
-%endif # patched_tests
 
 %build
 %py2_build
@@ -106,6 +104,10 @@ S3transfer is a Python library for managing Amazon S3 transfers.
 %endif # with_python3
 
 %changelog
+* Wed Feb 24 2016 Fabio Alessandro Locati <fabio@locati.cc> - 0.0.1-3
+- Greatly reduce and clean up the patch
+- Clean up the spec a little bit
+
 * Tue Feb 23 2016 Fabio Alessandro Locati <fabio@locati.cc> - 0.0.1-2
 - Add patch to remove tests needing web connection
 
diff --git a/s3transfer-remove_online_tests.patch b/s3transfer-remove_online_tests.patch
index 8995735..4614a00 100644
--- a/s3transfer-remove_online_tests.patch
+++ b/s3transfer-remove_online_tests.patch
@@ -1,255 +1,12 @@
-diff -Naur s3transfer-0.0.1.orig/tests/integration/test_s3transfer.py s3transfer-0.0.1/tests/integration/test_s3transfer.py
---- s3transfer-0.0.1.orig/tests/integration/test_s3transfer.py	2016-02-23 14:59:22.338749531 +0100
-+++ s3transfer-0.0.1/tests/integration/test_s3transfer.py	2016-02-23 15:01:05.566034716 +0100
-@@ -102,251 +102,3 @@
-         f.full_path('foo/bar.txt') -> /tmp/asdfasd/foo/bar.txt
-         """
-         return os.path.join(self.rootdir, filename)
--
--
--class TestS3Transfers(unittest.TestCase):
--    """Tests for the high level s3transfer module."""
--
--    @classmethod
--    def setUpClass(cls):
--        cls.region = 'us-west-2'
--        cls.session = botocore.session.get_session()
--        cls.client = cls.session.create_client('s3', cls.region)
--        cls.bucket_name = random_bucket_name()
--        cls.client.create_bucket(
--            Bucket=cls.bucket_name,
--            CreateBucketConfiguration={'LocationConstraint': cls.region})
--
--    def setUp(self):
--        self.files = FileCreator()
--
--    def tearDown(self):
--        self.files.remove_all()
--
--    @classmethod
--    def tearDownClass(cls):
--        cls.client.delete_bucket(Bucket=cls.bucket_name)
--
--    def delete_object(self, key):
--        self.client.delete_object(
--            Bucket=self.bucket_name,
--            Key=key)
--
--    def object_exists(self, key):
--        self.client.head_object(Bucket=self.bucket_name,
--                                Key=key)
--        return True
--
--    def create_s3_transfer(self, config=None):
--        return s3transfer.S3Transfer(self.client,
--                                            config=config)
--
--    def assert_has_public_read_acl(self, response):
--        grants = response['Grants']
--        public_read = [g['Grantee'].get('URI', '') for g in grants
--                       if g['Permission'] == 'READ']
--        self.assertIn('groups/global/AllUsers', public_read[0])
--
--    def test_upload_below_threshold(self):
--        config = s3transfer.TransferConfig(
--            multipart_threshold=2 * 1024 * 1024)
--        transfer = self.create_s3_transfer(config)
--        filename = self.files.create_file_with_size(
--            'foo.txt', filesize=1024 * 1024)
--        transfer.upload_file(filename, self.bucket_name,
--                             'foo.txt')
--        self.addCleanup(self.delete_object, 'foo.txt')
--
--        self.assertTrue(self.object_exists('foo.txt'))
--
--    def test_upload_above_threshold(self):
--        config = s3transfer.TransferConfig(
--            multipart_threshold=2 * 1024 * 1024)
--        transfer = self.create_s3_transfer(config)
--        filename = self.files.create_file_with_size(
--            '20mb.txt', filesize=20 * 1024 * 1024)
--        transfer.upload_file(filename, self.bucket_name,
--                             '20mb.txt')
--        self.addCleanup(self.delete_object, '20mb.txt')
--        self.assertTrue(self.object_exists('20mb.txt'))
--
--    def test_upload_file_above_threshold_with_acl(self):
--        config = s3transfer.TransferConfig(
--            multipart_threshold=5 * 1024 * 1024)
--        transfer = self.create_s3_transfer(config)
--        filename = self.files.create_file_with_size(
--            '6mb.txt', filesize=6 * 1024 * 1024)
--        extra_args = {'ACL': 'public-read'}
--        transfer.upload_file(filename, self.bucket_name,
--                             '6mb.txt', extra_args=extra_args)
--        self.addCleanup(self.delete_object, '6mb.txt')
--
--        self.assertTrue(self.object_exists('6mb.txt'))
--        response = self.client.get_object_acl(
--            Bucket=self.bucket_name, Key='6mb.txt')
--        self.assert_has_public_read_acl(response)
--
--    def test_upload_file_above_threshold_with_ssec(self):
--        key_bytes = os.urandom(32)
--        extra_args = {
--            'SSECustomerKey': key_bytes,
--            'SSECustomerAlgorithm': 'AES256',
--        }
--        config = s3transfer.TransferConfig(
--            multipart_threshold=5 * 1024 * 1024)
--        transfer = self.create_s3_transfer(config)
--        filename = self.files.create_file_with_size(
--            '6mb.txt', filesize=6 * 1024 * 1024)
--        transfer.upload_file(filename, self.bucket_name,
--                             '6mb.txt', extra_args=extra_args)
--        self.addCleanup(self.delete_object, '6mb.txt')
--        # A head object will fail if it has a customer key
--        # associated with it and it's not provided in the HeadObject
--        # request so we can use this to verify our functionality.
--        response = self.client.head_object(
--            Bucket=self.bucket_name,
--            Key='6mb.txt', **extra_args)
--        self.assertEqual(response['SSECustomerAlgorithm'], 'AES256')
--
--    def test_progress_callback_on_upload(self):
--        self.amount_seen = 0
--        lock = threading.Lock()
--
--        def progress_callback(amount):
--            with lock:
--                self.amount_seen += amount
--
--        transfer = self.create_s3_transfer()
--        filename = self.files.create_file_with_size(
--            '20mb.txt', filesize=20 * 1024 * 1024)
--        transfer.upload_file(filename, self.bucket_name,
--                             '20mb.txt', callback=progress_callback)
--        self.addCleanup(self.delete_object, '20mb.txt')
--
--        # The callback should have been called enough times such that
--        # the total amount of bytes we've seen (via the "amount"
--        # arg to the callback function) should be the size
--        # of the file we uploaded.
--        self.assertEqual(self.amount_seen, 20 * 1024 * 1024)
--
--    def test_callback_called_once_with_sigv4(self):
--        # Verify #98, where the callback was being invoked
--        # twice when using signature version 4.
--        self.amount_seen = 0
--        lock = threading.Lock()
--
--        def progress_callback(amount):
--            with lock:
--                self.amount_seen += amount
--
--        client = self.session.create_client(
--            's3', self.region,
--            config=Config(signature_version='s3v4'))
--        transfer = s3transfer.S3Transfer(client)
--        filename = self.files.create_file_with_size(
--            '10mb.txt', filesize=10 * 1024 * 1024)
--        transfer.upload_file(filename, self.bucket_name,
--                             '10mb.txt', callback=progress_callback)
--        self.addCleanup(self.delete_object, '10mb.txt')
--
--        self.assertEqual(self.amount_seen, 10 * 1024 * 1024)
--
--    def test_can_send_extra_params_on_upload(self):
--        transfer = self.create_s3_transfer()
--        filename = self.files.create_file_with_size('foo.txt', filesize=1024)
--        transfer.upload_file(filename, self.bucket_name,
--                             'foo.txt', extra_args={'ACL': 'public-read'})
--        self.addCleanup(self.delete_object, 'foo.txt')
--
--        response = self.client.get_object_acl(
--            Bucket=self.bucket_name, Key='foo.txt')
--        self.assert_has_public_read_acl(response)
--
--    def test_can_configure_threshold(self):
--        config = s3transfer.TransferConfig(
--            multipart_threshold=6 * 1024 * 1024
--        )
--        transfer = self.create_s3_transfer(config)
--        filename = self.files.create_file_with_size(
--            'foo.txt', filesize=8 * 1024 * 1024)
--        transfer.upload_file(filename, self.bucket_name,
--                             'foo.txt')
--        self.addCleanup(self.delete_object, 'foo.txt')
--
--        self.assertTrue(self.object_exists('foo.txt'))
--
--    def test_can_send_extra_params_on_download(self):
--        # We're picking the customer provided sse feature
--        # of S3 to test the extra_args functionality of
--        # S3.
--        key_bytes = os.urandom(32)
--        extra_args = {
--            'SSECustomerKey': key_bytes,
--            'SSECustomerAlgorithm': 'AES256',
--        }
--        self.client.put_object(Bucket=self.bucket_name,
--                               Key='foo.txt',
--                               Body=b'hello world',
--                               **extra_args)
--        self.addCleanup(self.delete_object, 'foo.txt')
--        transfer = self.create_s3_transfer()
--
--        download_path = os.path.join(self.files.rootdir, 'downloaded.txt')
--        transfer.download_file(self.bucket_name, 'foo.txt',
--                               download_path, extra_args=extra_args)
--        with open(download_path, 'rb') as f:
--            self.assertEqual(f.read(), b'hello world')
--
--    def test_progress_callback_on_download(self):
--        self.amount_seen = 0
--        lock = threading.Lock()
--
--        def progress_callback(amount):
--            with lock:
--                self.amount_seen += amount
--
--        transfer = self.create_s3_transfer()
--        filename = self.files.create_file_with_size(
--            '20mb.txt', filesize=20 * 1024 * 1024)
--        with open(filename, 'rb') as f:
--            self.client.put_object(Bucket=self.bucket_name,
--                                   Key='20mb.txt', Body=f)
--        self.addCleanup(self.delete_object, '20mb.txt')
--
--        download_path = os.path.join(self.files.rootdir, 'downloaded.txt')
--        transfer.download_file(self.bucket_name, '20mb.txt',
--                               download_path, callback=progress_callback)
--
--        self.assertEqual(self.amount_seen, 20 * 1024 * 1024)
--
--    def test_download_below_threshold(self):
--        transfer = self.create_s3_transfer()
--
--        filename = self.files.create_file_with_size(
--            'foo.txt', filesize=1024 * 1024)
--        with open(filename, 'rb') as f:
--            self.client.put_object(Bucket=self.bucket_name,
--                                   Key='foo.txt',
--                                   Body=f)
--            self.addCleanup(self.delete_object, 'foo.txt')
--
--        download_path = os.path.join(self.files.rootdir, 'downloaded.txt')
--        transfer.download_file(self.bucket_name, 'foo.txt',
--                               download_path)
--        assert_files_equal(filename, download_path)
--
--    def test_download_above_threshold(self):
--        transfer = self.create_s3_transfer()
--
--        filename = self.files.create_file_with_size(
--            'foo.txt', filesize=20 * 1024 * 1024)
--        with open(filename, 'rb') as f:
--            self.client.put_object(Bucket=self.bucket_name,
--                                   Key='foo.txt',
--                                   Body=f)
--            self.addCleanup(self.delete_object, 'foo.txt')
--
--        download_path = os.path.join(self.files.rootdir, 'downloaded.txt')
--        transfer.download_file(self.bucket_name, 'foo.txt',
--                               download_path)
--        assert_files_equal(filename, download_path)
+diff -Naur s3transfer-0.0.1.orig/setup.py s3transfer-0.0.1/setup.py
+--- s3transfer-0.0.1.orig/setup.py	2016-02-24 18:39:51.714517811 +0100
++++ s3transfer-0.0.1/setup.py	2016-02-24 23:47:01.342562454 +0100
+@@ -33,7 +33,7 @@
+     long_description=open('README.rst').read(),
+     author='Amazon Web Services',
+     url='https://github.com/boto/s3transfer',
+-    packages=find_packages(exclude=['tests*']),
++    packages=find_packages(exclude=['tests/unit']),
+     install_requires=requires,
+     extras_require={
+         ':python_version=="2.6" or python_version=="2.7"': [