From 1e94498d9d548cbea6466a45dafa3b919c65bd1f Mon Sep 17 00:00:00 2001 From: Benjamin Yolken Date: Fri, 3 Feb 2017 13:32:23 -0800 Subject: [PATCH 01/12] Add initial implementation of S3Cache --- setup.py | 1 + superset/assets/version_info.json | 2 +- superset/results_backends.py | 114 ++++++++++++++++++++++++++++-- 3 files changed, 110 insertions(+), 7 deletions(-) diff --git a/setup.py b/setup.py index 20dcbc16efebd..d679cb7a32056 100644 --- a/setup.py +++ b/setup.py @@ -42,6 +42,7 @@ def get_git_sha(): zip_safe=False, scripts=['superset/bin/superset'], install_requires=[ + 'boto3==1.4.4', 'celery==3.1.23', 'cryptography==1.5.3', 'flask-appbuilder==1.8.1', diff --git a/superset/assets/version_info.json b/superset/assets/version_info.json index cff95f6d46e0f..4be1844c207e6 100644 --- a/superset/assets/version_info.json +++ b/superset/assets/version_info.json @@ -1 +1 @@ -{"GIT_SHA": "2d08e240285288b71df98747ddd4b6cca3220c5a", "version": "0.15.2"} \ No newline at end of file +{"GIT_SHA": "0f7189b859f4a782fd43af694012029645f81b44", "version": "0.15.4"} \ No newline at end of file diff --git a/superset/results_backends.py b/superset/results_backends.py index 714ed66b15c6f..ec0d5b28f0538 100644 --- a/superset/results_backends.py +++ b/superset/results_backends.py @@ -7,24 +7,106 @@ from __future__ import print_function from __future__ import unicode_literals +import cPickle +import logging +import StringIO + +import boto3 from werkzeug.contrib.cache import BaseCache +from superset import app + +config = app.config + class S3Cache(BaseCache): - """S3 cache""" + """S3 cache implementation. + + Adapted from examples in + https://github.com/pallets/werkzeug/blob/master/werkzeug/contrib/cache.py. + + Timeout parameters are ignored as S3 doesn't support key-level expiration. To expire + keys, set up an expiration policy as described in + https://aws.amazon.com/blogs/aws/amazon-s3-object-expiration/. + """ def __init__(self, default_timeout=300): self.default_timeout = default_timeout + self.s3_client = boto3.client('s3') + self.bucket = self.s3_resource.Bucket(config.get('S3_CACHE_BUCKET')) + self.key_prefix = config.get('S3_CACHE_KEY_PREFIX') + def get(self, key): - return None + """Look up key in the cache and return the value for it. + :param key: the key to be looked up. + :returns: The value if it exists and is readable, else ``None``. + """ + if not self._key_exists(key): + return None + else: + value_file = StringIO.StringIO() + + try: + self.s3_client.download_fileobj(self.bucket, self._full_s3_key(key), value_file) + except Exception as e: + logging.warn('Exception while trying to get %s: %s', key, e) + return None + else: + value_file.seek(0) + return cPickle.load(value_file) def delete(self, key): - return True + """Delete `key` from the cache. + :param key: the key to delete. + :returns: Whether the key existed and has been deleted. + :rtype: boolean + """ + if not self._key_exists(key): + return False + else: + try: + response = self.s3_client.delete_objects( + Bucket=self.bucket, + Delete={ + 'Objects': [ + { + 'Key': self._full_s3_key(key) + } + ] + } + ) + except Exception as e: + logging.warn('Exception while trying to delete %s: %s', key, e) + return False + else: + return True def set(self, key, value, timeout=None): - return True + """Add a new key/value to the cache (overwrites value, if key already + exists in the cache). 
+ :param key: the key to set + :param value: the value for the key + :param timeout: the cache timeout for the key in seconds (if not + specified, it uses the default timeout). A timeout of + 0 idicates that the cache never expires. + :returns: ``True`` if key has been updated, ``False`` for backend + errors. Pickling errors, however, will raise a subclass of + ``pickle.PickleError``. + :rtype: boolean + """ + value_file = StringIO.StringIO() + cPickle.dump(value, value_file) + + try: + value_file.seek(0) + self.s3_client.upload_fileobj(value_file, self.bucket, self._full_s3_key(key)) + except Exception as e: + logging.warn('Exception while trying to set %s: %s', key, e) + return False + else: + return True def add(self, key, value, timeout=None): """Works like :meth:`set` but does not overwrite the values of already @@ -38,7 +120,10 @@ def add(self, key, value, timeout=None): existing keys. :rtype: boolean """ - return True + if self._key_exists(key): + return False + else: + return self.set(key, value, timeout=timeout) def clear(self): """Clears the cache. Keep in mind that not all caches support @@ -46,4 +131,21 @@ def clear(self): :returns: Whether the cache has been cleared. :rtype: boolean """ - return True + return False + + def _full_s3_key(self, key): + """Convert a cache key to a full S3 key, including the key prefix.""" + return '%s%s' % (self.key_prefix, key) + + def _key_exists(self, key): + """Determine whether the given key exists in the bucket.""" + try: + response = self.s3_client.head_object( + Bucket=self.bucket, + Key=self._full_s3_key(key) + ) + except Exception as e: + # head_object throws an exception when object doesn't exist + return False + else: + return True From 1546b1ae716d47dad7c583100dbf73665d88aa3b Mon Sep 17 00:00:00 2001 From: Benjamin Yolken Date: Fri, 3 Feb 2017 15:40:18 -0800 Subject: [PATCH 02/12] Add tests for S3Cache --- superset/assets/version_info.json | 2 +- superset/results_backends.py | 4 +- tests/results_backends_tests.py | 120 ++++++++++++++++++++++++++++++ 3 files changed, 123 insertions(+), 3 deletions(-) create mode 100644 tests/results_backends_tests.py diff --git a/superset/assets/version_info.json b/superset/assets/version_info.json index 4be1844c207e6..5fd2d482054ce 100644 --- a/superset/assets/version_info.json +++ b/superset/assets/version_info.json @@ -1 +1 @@ -{"GIT_SHA": "0f7189b859f4a782fd43af694012029645f81b44", "version": "0.15.4"} \ No newline at end of file +{"GIT_SHA": "1e94498d9d548cbea6466a45dafa3b919c65bd1f", "version": "0.15.4"} \ No newline at end of file diff --git a/superset/results_backends.py b/superset/results_backends.py index ec0d5b28f0538..614d28c2f6b09 100644 --- a/superset/results_backends.py +++ b/superset/results_backends.py @@ -35,7 +35,7 @@ def __init__(self, default_timeout=300): self.default_timeout = default_timeout self.s3_client = boto3.client('s3') - self.bucket = self.s3_resource.Bucket(config.get('S3_CACHE_BUCKET')) + self.bucket = config.get('S3_CACHE_BUCKET') self.key_prefix = config.get('S3_CACHE_KEY_PREFIX') def get(self, key): @@ -140,7 +140,7 @@ def _full_s3_key(self, key): def _key_exists(self, key): """Determine whether the given key exists in the bucket.""" try: - response = self.s3_client.head_object( + self.s3_client.head_object( Bucket=self.bucket, Key=self._full_s3_key(key) ) diff --git a/tests/results_backends_tests.py b/tests/results_backends_tests.py new file mode 100644 index 0000000000000..8ebbb53a77e4c --- /dev/null +++ b/tests/results_backends_tests.py @@ -0,0 +1,120 @@ +import 
cPickle +import mock + +from superset import app, results_backends +from .base_tests import SupersetTestCase + +app.config['S3_CACHE_BUCKET'] = 'test-bucket' +app.config['S3_CACHE_KEY_PREFIX'] = 'test-prefix/' + + +class ResultsBackendsTests(SupersetTestCase): + requires_examples = False + + @mock.patch('boto3.client') + def setUp(self, mock_boto3_client): + self.mock_boto3_client = mock_boto3_client + self.mock_s3_client = mock.MagicMock() + + self.mock_boto3_client.return_value = self.mock_s3_client + + self.s3_cache = results_backends.S3Cache() + self.s3_cache._key_exists = ResultsBackendsTests._mock_key_exists + + @staticmethod + def _mock_download_fileobj(bucket, key, value_file): + value_file.write(cPickle.dumps('%s:%s' % (bucket, key))) + + @staticmethod + def _mock_key_exists(key): + return key == 'test-key' + + def test_s3_cache_initilization(self): + self.mock_boto3_client.assert_called_with('s3') + + def test_s3_cache_set(self): + result = self.s3_cache.set('test-key', 'test-value') + + self.assertTrue(result) + self.mock_s3_client.upload_fileobj.assert_called_once() + + call_args = self.mock_s3_client.upload_fileobj.call_args_list[0][0] + + self.assertEquals(cPickle.loads(call_args[0].getvalue()), 'test-value') + self.assertEquals(call_args[1], 'test-bucket') + self.assertEquals(call_args[2], 'test-prefix/test-key') + + def test_s3_cache_exception(self): + self.mock_s3_client.upload_fileobj.side_effect = Exception('Something bad happened!') + result = self.s3_cache.set('test-key', 'test-value') + + self.assertFalse(result) + self.mock_s3_client.upload_fileobj.assert_called_once() + + def test_s3_cache_get_exists(self): + self.mock_s3_client.download_fileobj.side_effect = ( + ResultsBackendsTests._mock_download_fileobj) + result = self.s3_cache.get('test-key') + + self.assertEquals(result, 'test-bucket:test-prefix/test-key') + self.mock_s3_client.download_fileobj.assert_called_once() + + def test_s3_cache_get_does_not_exist(self): + result = self.s3_cache.get('test-key2') + + self.assertEquals(result, None) + self.assertFalse(self.mock_s3_client.download_fileobj.called) + + def test_s3_cache_get_s3_exception(self): + self.mock_s3_client.download_fileobj.side_effect = Exception('Something bad happened') + result = self.s3_cache.get('test-key') + + self.assertEquals(result, None) + self.mock_s3_client.download_fileobj.assert_called_once() + + def test_s3_cache_delete_exists(self): + result = self.s3_cache.delete('test-key') + + self.assertTrue(result) + self.mock_s3_client.delete_objects.assert_called_once_with( + Bucket='test-bucket', + Delete={'Objects': [{'Key': 'test-prefix/test-key'}]} + ) + + def test_s3_cache_delete_does_not_exist(self): + result = self.s3_cache.delete('test-key2') + + self.assertFalse(result) + self.assertFalse(self.mock_s3_client.delete_objects.called) + + def test_s3_cache_delete_exception(self): + self.mock_s3_client.delete_objects.side_effect = Exception('Something bad happened') + result = self.s3_cache.delete('test-key') + + self.assertFalse(result) + self.mock_s3_client.delete_objects.assert_called_once() + + def test_s3_cache_add_exists(self): + result = self.s3_cache.add('test-key', 'test-value') + + self.assertFalse(result) + self.assertFalse(self.mock_s3_client.upload_fileobj.called) + + def test_s3_cache_add_does_not_exist(self): + result = self.s3_cache.add('test-key2', 'test-value') + + self.assertTrue(result) + self.mock_s3_client.upload_fileobj.assert_called_once() + + call_args = self.mock_s3_client.upload_fileobj.call_args_list[0][0] + + 
self.assertEquals(cPickle.loads(call_args[0].getvalue()), 'test-value') + self.assertEquals(call_args[1], 'test-bucket') + self.assertEquals(call_args[2], 'test-prefix/test-key2') + + def test_s3_cache_add_exception(self): + self.mock_s3_client.upload_fileobj.side_effect = Exception('Something bad happened') + result = self.s3_cache.add('test-key2', 'test-value') + + self.assertFalse(result) + self.mock_s3_client.upload_fileobj.assert_called_once() From 00b6b0ac68571df1a7c8e16fd0e79c64cbfe0a60 Mon Sep 17 00:00:00 2001 From: Benjamin Yolken Date: Fri, 3 Feb 2017 15:43:43 -0800 Subject: [PATCH 03/12] Misc. style tweaks to S3Cache changes and tests --- superset/results_backends.py | 1 + tests/results_backends_tests.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/superset/results_backends.py b/superset/results_backends.py index 614d28c2f6b09..ccf755fc7d146 100644 --- a/superset/results_backends.py +++ b/superset/results_backends.py @@ -35,6 +35,7 @@ def __init__(self, default_timeout=300): self.default_timeout = default_timeout self.s3_client = boto3.client('s3') + self.bucket = config.get('S3_CACHE_BUCKET') self.key_prefix = config.get('S3_CACHE_KEY_PREFIX') diff --git a/tests/results_backends_tests.py b/tests/results_backends_tests.py index 8ebbb53a77e4c..f628ebdbf975a 100644 --- a/tests/results_backends_tests.py +++ b/tests/results_backends_tests.py @@ -1,4 +1,5 @@ import cPickle + import mock from superset import app, results_backends @@ -44,7 +45,7 @@ def test_s3_cache_set(self): self.assertEquals(call_args[1], 'test-bucket') self.assertEquals(call_args[2], 'test-prefix/test-key') - def test_s3_cache_exception(self): + def test_s3_cache_set_exception(self): self.mock_s3_client.upload_fileobj.side_effect = Exception('Something bad happened!') result = self.s3_cache.set('test-key', 'test-value') From f85481d51b3481d7e0ee7f9b73991fb5e2b219ef Mon Sep 17 00:00:00 2001 From: Benjamin Yolken Date: Fri, 3 Feb 2017 15:48:33 -0800 Subject: [PATCH 04/12] Fix long lines in superset/results_backends.py --- superset/results_backends.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/superset/results_backends.py b/superset/results_backends.py index ccf755fc7d146..3f5e3ec100ebe 100644 --- a/superset/results_backends.py +++ b/superset/results_backends.py @@ -26,8 +26,8 @@ class S3Cache(BaseCache): Adapted from examples in https://github.com/pallets/werkzeug/blob/master/werkzeug/contrib/cache.py. - Timeout parameters are ignored as S3 doesn't support key-level expiration. To expire - keys, set up an expiration policy as described in + Timeout parameters are ignored as S3 doesn't support key-level expiration. + To expire keys, set up an expiration policy as described in https://aws.amazon.com/blogs/aws/amazon-s3-object-expiration/. 
""" @@ -50,7 +50,11 @@ def get(self, key): value_file = StringIO.StringIO() try: - self.s3_client.download_fileobj(self.bucket, self._full_s3_key(key), value_file) + self.s3_client.download_fileobj( + self.bucket, + self._full_s3_key(key), + value_file + ) except Exception as e: logging.warn('Exception while trying to get %s: %s', key, e) return None @@ -102,7 +106,11 @@ def set(self, key, value, timeout=None): try: value_file.seek(0) - self.s3_client.upload_fileobj(value_file, self.bucket, self._full_s3_key(key)) + self.s3_client.upload_fileobj( + value_file, + self.bucket, + self._full_s3_key(key) + ) except Exception as e: logging.warn('Exception while trying to set %s: %s', key, e) return False From 6a0a1af67ebfc11fde51eb3d77db7b9ac6569c3c Mon Sep 17 00:00:00 2001 From: Benjamin Yolken Date: Fri, 3 Feb 2017 15:59:18 -0800 Subject: [PATCH 05/12] Fix misc. style issues --- superset/results_backends.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/superset/results_backends.py b/superset/results_backends.py index 3f5e3ec100ebe..bb58d090d31a3 100644 --- a/superset/results_backends.py +++ b/superset/results_backends.py @@ -41,6 +41,7 @@ def __init__(self, default_timeout=300): def get(self, key): """Look up key in the cache and return the value for it. + :param key: the key to be looked up. :returns: The value if it exists and is readable, else ``None``. """ @@ -64,6 +65,7 @@ def get(self, key): def delete(self, key): """Delete `key` from the cache. + :param key: the key to delete. :returns: Whether the key existed and has been deleted. :rtype: boolean @@ -72,7 +74,7 @@ def delete(self, key): return False else: try: - response = self.s3_client.delete_objects( + self.s3_client.delete_objects( Bucket=self.bucket, Delete={ 'Objects': [ @@ -89,8 +91,10 @@ def delete(self, key): return True def set(self, key, value, timeout=None): - """Add a new key/value to the cache (overwrites value, if key already - exists in the cache). + """Add a new key/value to the cache. + + If the key already exists, the existing value is overwritten. + :param key: the key to set :param value: the value for the key :param timeout: the cache timeout for the key in seconds (if not @@ -118,8 +122,8 @@ def set(self, key, value, timeout=None): return True def add(self, key, value, timeout=None): - """Works like :meth:`set` but does not overwrite the values of already - existing keys. + """Works like :meth:`set` but does not overwrite existing values. + :param key: the key to set :param value: the value for the key :param timeout: the cache timeout for the key in seconds (if not @@ -135,8 +139,9 @@ def add(self, key, value, timeout=None): return self.set(key, value, timeout=timeout) def clear(self): - """Clears the cache. Keep in mind that not all caches support - completely clearing the cache. + """Clears the cache. + + Keep in mind that not all caches support completely clearing the cache. :returns: Whether the cache has been cleared. :rtype: boolean """ @@ -153,7 +158,7 @@ def _key_exists(self, key): Bucket=self.bucket, Key=self._full_s3_key(key) ) - except Exception as e: + except Exception: # head_object throws an exception when object doesn't exist return False else: From 0ee1abf31a021d2c9e40d9b3c321fce14d4d7179 Mon Sep 17 00:00:00 2001 From: Benjamin Yolken Date: Fri, 3 Feb 2017 16:21:33 -0800 Subject: [PATCH 06/12] Misc. 
fixes in response to code review feedback --- superset/assets/version_info.json | 1 - superset/results_backends.py | 12 +++++++++--- 2 files changed, 9 insertions(+), 4 deletions(-) delete mode 100644 superset/assets/version_info.json diff --git a/superset/assets/version_info.json b/superset/assets/version_info.json deleted file mode 100644 index 5fd2d482054ce..0000000000000 --- a/superset/assets/version_info.json +++ /dev/null @@ -1 +0,0 @@ -{"GIT_SHA": "1e94498d9d548cbea6466a45dafa3b919c65bd1f", "version": "0.15.4"} \ No newline at end of file diff --git a/superset/results_backends.py b/superset/results_backends.py index bb58d090d31a3..6fb1386690bea 100644 --- a/superset/results_backends.py +++ b/superset/results_backends.py @@ -57,7 +57,9 @@ def get(self, key): value_file ) except Exception as e: - logging.warn('Exception while trying to get %s: %s', key, e) + logging.warn('Error while trying to get key %s', key) + logging.exception(e) + return None else: value_file.seek(0) @@ -85,7 +87,9 @@ def delete(self, key): } ) except Exception as e: - logging.warn('Exception while trying to delete %s: %s', key, e) + logging.warn('Error while trying to delete key %s', key) + logging.exception(e) + return False else: return True @@ -116,7 +120,9 @@ def set(self, key, value, timeout=None): self._full_s3_key(key) ) except Exception as e: - logging.warn('Exception while trying to set %s: %s', key, e) + logging.warn('Error while trying to set key %s', key) + logging.exception(e) + return False else: return True From 167ed33bba160f091e613bc2a351ca5ddc7c8189 Mon Sep 17 00:00:00 2001 From: Benjamin Yolken Date: Fri, 3 Feb 2017 16:29:28 -0800 Subject: [PATCH 07/12] Fix name of test in results_backends_tests module --- tests/results_backends_tests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/results_backends_tests.py b/tests/results_backends_tests.py index f628ebdbf975a..041ebd09f190b 100644 --- a/tests/results_backends_tests.py +++ b/tests/results_backends_tests.py @@ -66,7 +66,7 @@ def test_s3_cache_get_does_not_exist(self): self.assertEquals(result, None) self.assertFalse(self.mock_s3_client.download_fileobj.called) - def test_s3_cache_get_s3_exception(self): + def test_s3_cache_get_exception(self): self.mock_s3_client.download_fileobj.side_effect = Exception('Something bad happened') result = self.s3_cache.get('test-key') From ce50e6e4fe2147cdf61288d687d49e174f3b7b1d Mon Sep 17 00:00:00 2001 From: Benjamin Yolken Date: Fri, 3 Feb 2017 21:27:21 -0800 Subject: [PATCH 08/12] Fix python3 cPickle import errors --- superset/results_backends.py | 10 +++++++--- tests/results_backends_tests.py | 11 +++++++---- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/superset/results_backends.py b/superset/results_backends.py index 6fb1386690bea..cd3d8de16fce5 100644 --- a/superset/results_backends.py +++ b/superset/results_backends.py @@ -7,7 +7,11 @@ from __future__ import print_function from __future__ import unicode_literals -import cPickle +try: + import cPickle as pickle +except: + import pickle + import logging import StringIO @@ -63,7 +67,7 @@ def get(self, key): return None else: value_file.seek(0) - return cPickle.load(value_file) + return pickle.load(value_file) def delete(self, key): """Delete `key` from the cache. 
@@ -110,7 +114,7 @@ def set(self, key, value, timeout=None): :rtype: boolean """ value_file = StringIO.StringIO() - cPickle.dump(value, value_file) + pickle.dump(value, value_file) try: value_file.seek(0) diff --git a/tests/results_backends_tests.py b/tests/results_backends_tests.py index 041ebd09f190b..9f4e8dc6b26b8 100644 --- a/tests/results_backends_tests.py +++ b/tests/results_backends_tests.py @@ -1,4 +1,7 @@ -import cPickle +try: + import cPickle as pickle +except: + import pickle import mock @@ -24,7 +27,7 @@ def setUp(self, mock_boto3_client): @staticmethod def _mock_download_fileobj(bucket, key, value_file): - value_file.write(cPickle.dumps('%s:%s' % (bucket, key))) + value_file.write(pickle.dumps('%s:%s' % (bucket, key))) @staticmethod def _mock_key_exists(key): @@ -41,7 +44,7 @@ def test_s3_cache_set(self): call_args = self.mock_s3_client.upload_fileobj.call_args_list[0][0] - self.assertEquals(cPickle.loads(call_args[0].getvalue()), 'test-value') + self.assertEquals(pickle.loads(call_args[0].getvalue()), 'test-value') self.assertEquals(call_args[1], 'test-bucket') self.assertEquals(call_args[2], 'test-prefix/test-key') @@ -109,7 +112,7 @@ def test_s3_cache_add_does_not_exist(self): call_args = self.mock_s3_client.upload_fileobj.call_args_list[0][0] - self.assertEquals(cPickle.loads(call_args[0].getvalue()), 'test-value') + self.assertEquals(pickle.loads(call_args[0].getvalue()), 'test-value') self.assertEquals(call_args[1], 'test-bucket') self.assertEquals(call_args[2], 'test-prefix/test-key2') From b927ff6eef7e948be1f2a7e828f0d7de9458d2c2 Mon Sep 17 00:00:00 2001 From: Benjamin Yolken Date: Fri, 3 Feb 2017 21:28:42 -0800 Subject: [PATCH 09/12] Fix indentation errors in results_backends module --- superset/results_backends.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/superset/results_backends.py b/superset/results_backends.py index cd3d8de16fce5..bc1ff10cd5a6a 100644 --- a/superset/results_backends.py +++ b/superset/results_backends.py @@ -8,9 +8,9 @@ from __future__ import unicode_literals try: - import cPickle as pickle + import cPickle as pickle except: - import pickle + import pickle import logging import StringIO From 68592aeddfdd88a2cb291533a9e595cff9b5d6d2 Mon Sep 17 00:00:00 2001 From: Benjamin Yolken Date: Fri, 3 Feb 2017 21:43:31 -0800 Subject: [PATCH 10/12] Fix StringIO import in results_backends module --- superset/results_backends.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/superset/results_backends.py b/superset/results_backends.py index bc1ff10cd5a6a..541c0091dcde6 100644 --- a/superset/results_backends.py +++ b/superset/results_backends.py @@ -13,7 +13,11 @@ import pickle import logging -import StringIO + +try: + import StringIO +except ImportError: + import io as StringIO import boto3 from werkzeug.contrib.cache import BaseCache From 716406198e50b04d2f6600c518b01f65ad690748 Mon Sep 17 00:00:00 2001 From: Benjamin Yolken Date: Fri, 3 Feb 2017 21:50:04 -0800 Subject: [PATCH 11/12] Clean up imports of cPickle and StringIO --- superset/results_backends.py | 2 +- tests/results_backends_tests.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/superset/results_backends.py b/superset/results_backends.py index 541c0091dcde6..824eb7b2274d3 100644 --- a/superset/results_backends.py +++ b/superset/results_backends.py @@ -9,7 +9,7 @@ try: import cPickle as pickle -except: +except ImportError: import pickle import logging diff --git a/tests/results_backends_tests.py 
b/tests/results_backends_tests.py index 9f4e8dc6b26b8..146759c5f0418 100644 --- a/tests/results_backends_tests.py +++ b/tests/results_backends_tests.py @@ -1,6 +1,6 @@ try: import cPickle as pickle -except: +except ImportError: import pickle import mock From 461e41cd610d1bff33ac10c6ea5879b498a16f41 Mon Sep 17 00:00:00 2001 From: Benjamin Yolken Date: Fri, 3 Feb 2017 22:36:34 -0800 Subject: [PATCH 12/12] Use BytesIO instead of StringIO for python2/3 compatibility --- superset/results_backends.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/superset/results_backends.py b/superset/results_backends.py index 824eb7b2274d3..0448d7c390c4b 100644 --- a/superset/results_backends.py +++ b/superset/results_backends.py @@ -12,13 +12,9 @@ except ImportError: import pickle +import io import logging -try: - import StringIO -except ImportError: - import io as StringIO - import boto3 from werkzeug.contrib.cache import BaseCache @@ -56,7 +52,7 @@ def get(self, key): if not self._key_exists(key): return None else: - value_file = StringIO.StringIO() + value_file = io.BytesIO() try: self.s3_client.download_fileobj( @@ -117,7 +113,7 @@ def set(self, key, value, timeout=None): ``pickle.PickleError``. :rtype: boolean """ - value_file = StringIO.StringIO() + value_file = io.BytesIO() pickle.dump(value, value_file) try:
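
The patches above exercise S3Cache only through mocks, so the following is a minimal sketch of driving the cache directly, mirroring the pattern in tests/results_backends_tests.py. It assumes AWS credentials are available to boto3 (environment variables, ~/.aws/credentials, or an instance profile) and that the bucket name and key prefix, both hypothetical here, point at a bucket that already exists.

    from superset import app
    from superset.results_backends import S3Cache

    # Same pattern as the tests: set the two config keys the cache reads,
    # then construct it.
    app.config['S3_CACHE_BUCKET'] = 'my-superset-results'   # hypothetical bucket
    app.config['S3_CACHE_KEY_PREFIX'] = 'sql_lab/'          # hypothetical prefix

    cache = S3Cache()
    cache.set('query-123', {'rows': [[1, 'a'], [2, 'b']]})  # pickled and uploaded
    print(cache.get('query-123'))     # downloaded and unpickled -> the dict above
    print(cache.add('query-123', 'x'))   # False: key already exists
    print(cache.delete('query-123'))     # True once the object has been removed

The patches do not show how the cache is wired into a deployment; presumably the instance is assigned to Superset's RESULTS_BACKEND setting in superset_config.py, with the two S3_* keys defined alongside it. Because per-key timeouts are ignored, stale results have to be expired on the S3 side, as the class docstring notes. A bucket lifecycle rule along the lines sketched below covers that; the rule ID and retention period are illustrative, and the call assumes permission to manage the bucket's lifecycle configuration.

    import boto3

    s3 = boto3.client('s3')
    s3.put_bucket_lifecycle_configuration(
        Bucket='my-superset-results',
        LifecycleConfiguration={
            'Rules': [{
                'ID': 'expire-cached-results',
                'Filter': {'Prefix': 'sql_lab/'},   # match S3_CACHE_KEY_PREFIX
                'Status': 'Enabled',
                'Expiration': {'Days': 7},          # illustrative retention
            }],
        },
    )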