@@ -58,22 +58,84 @@ def build_pathing_style(self, style, key):
"virtual-hosted.".format(style=style)
)


def put_object(self, key, file_path, style="path", pre_check=False):
"""
Put the object into S3 and return the S3 URL of the object
Put the object into S3 and return the reference to the object
in the requested path style.

Args:
key (str): The S3 object key to check and/or write to.

file_path (str): The file to upload using binary write mode.

style (str): The path style to use when returning the S3 object
location. Valid values are listed in this class using the
static method: supported_path_styles.

pre_check (bool): Whether to check if the object already exists
in the S3 bucket. When set to True, the file is only uploaded
if the object does not exist yet. When set to False, the upload
is always performed, whether the object already exists or not.
Be aware that the contents of the object and the given file are
not compared; only the presence of the given object key in the
bucket is checked.

Returns:
str: The S3 object reference in the requested path style. This
is returned regardless of whether an upload was performed. If
the object key already existed and pre_check was set to True,
calling this function only returns the reference path to the
object.
"""
if pre_check:
try:
self.client.get_object(Bucket=self.bucket, Key=key)
except self.client.exceptions.NoSuchKey:
LOGGER.info("Uploading %s as %s to S3 Bucket %s in %s", file_path, key, self.bucket, self.region)
self.resource.Object(self.bucket, key).put(Body=open(file_path, 'rb'))
finally:
return self.build_pathing_style(style, key) #pylint: disable=W0150
self.resource.Object(self.bucket, key).put(Body=open(file_path, 'rb'))
# Do we need to upload the file to the bucket?
# If no pre-check is requested, always upload. Otherwise, check whether
# the object exists first and only upload if it does not exist yet.
if not pre_check or not self._does_object_exist(key):
self._perform_put_object(key, file_path)
return self.build_pathing_style(style, key)

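# Hypothetical usage sketch (illustration only, not part of this change).
# The region, bucket, key and file name below are assumptions, and the
# default "path" style from the method signature is used.
#
#     s3 = S3('eu-west-1', 'some_bucket')
#     # Uploads only when the key is not in the bucket yet; always returns
#     # the path-style reference to the object.
#     reference = s3.put_object(
#         key='deployment/template.json',
#         file_path='template.json',
#         pre_check=True,
#     )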
def _does_object_exist(self, key):
"""
Check whether the given S3 object key exists in this bucket or not.

Args:
key (str): The S3 object key to check.

Returns:
bool: True when the object exists, False when it does not.
"""
try:
self.client.get_object(Bucket=self.bucket, Key=key)
return True
except self.client.exceptions.NoSuchKey:
return False

def _perform_put_object(self, key, file_path):
"""
Perform the actual put operation without any checks.
This is called internally by the put_object method when the
given file needs to be uploaded.

Args:
key (str): The S3 key of the object to put the file contents to.

file_path (str): The file to upload using binary write mode.
"""
try:
LOGGER.info(
"Uploading %s as %s to S3 Bucket %s in %s",
file_path,
key,
self.bucket,
self.region,
)
with open(file_path, 'rb') as file_handler:
self.resource.Object(self.bucket, key).put(Body=file_handler)
LOGGER.debug("Upload of %s was successful.", key)
except BaseException:
LOGGER.error("Failed to upload %s", key, exc_info=True)
raise

def read_object(self, key):
s3_object = self.resource.Object(self.bucket, key)
return s3_object.get()['Body'].read().decode('utf-8')
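
A brief hedged sketch of reading an uploaded object back with read_object; the
region, bucket and key are placeholders and the object is assumed to be UTF-8
text small enough to hold in memory:

    s3 = S3('eu-west-1', 'some_bucket')
    # read_object returns the object body decoded from UTF-8 into a str.
    contents = s3.read_object('deployment/template.json')
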
@@ -3,11 +3,10 @@

# pylint: skip-file

import os
import boto3
import botocore
from botocore.stub import Stubber
from pytest import fixture, raises
from stubs import stub_s3
from mock import Mock
from mock import Mock, patch, mock_open
from s3 import S3

@fixture
@@ -117,3 +116,263 @@ def test_build_pathing_style_unknown_style(us_east_1_cls):

error_message = str(excinfo.value)
assert error_message.find(correct_error_message) >= 0


@patch('s3.S3.build_pathing_style')
@patch('s3.S3._perform_put_object')
@patch('s3.S3._does_object_exist')
def test_put_object_no_checks_always_upload(does_exist, perform_put,
build_path, eu_west_1_cls):
object_key = "some"
object_path = "s3://bucket/{key}".format(key=object_key)
file_path = "some_imaginary_file.json"
path_style = "s3-url"
does_exist.return_value = True
build_path.return_value = object_path

return_value = eu_west_1_cls.put_object(
key=object_key,
file_path=file_path,
style=path_style,
pre_check=False,
)

assert return_value == object_path

does_exist.assert_not_called()
perform_put.assert_called_once_with(object_key, file_path)
build_path.assert_called_once_with(path_style, object_key)


@patch('s3.S3.build_pathing_style')
@patch('s3.S3._perform_put_object')
@patch('s3.S3._does_object_exist')
def test_put_object_do_check_upload_when_missing(
does_exist, perform_put, build_path, eu_west_1_cls):
object_key = "some"
object_path = "s3://bucket/{key}".format(key=object_key)
file_path = "some_imaginary_file.json"
path_style = "s3-url"
does_exist.return_value = False
build_path.return_value = object_path

return_value = eu_west_1_cls.put_object(
key=object_key,
file_path=file_path,
style=path_style,
pre_check=True,
)

assert return_value == object_path

does_exist.assert_called_once_with(object_key)
perform_put.assert_called_once_with(object_key, file_path)
build_path.assert_called_once_with(path_style, object_key)


@patch('s3.S3.build_pathing_style')
@patch('s3.S3._perform_put_object')
@patch('s3.S3._does_object_exist')
def test_put_object_do_check_no_upload_object_present(
does_exist, perform_put, build_path, eu_west_1_cls):
object_key = "some"
object_path = "s3://bucket/{key}".format(key=object_key)
file_path = "some_imaginary_file.json"
path_style = "s3-url"
does_exist.return_value = True
build_path.return_value = object_path

return_value = eu_west_1_cls.put_object(
key=object_key,
file_path=file_path,
style=path_style,
pre_check=True,
)

assert return_value == object_path

does_exist.assert_called_once_with(object_key)
perform_put.assert_not_called()
build_path.assert_called_once_with(path_style, object_key)


@patch('s3.boto3.client')
def test_does_object_exist_yes(boto3_client):
s3_client = botocore.session.get_session().create_client('s3')
s3_client_stubber = Stubber(s3_client)
boto3_client.return_value = s3_client
object_key = "some"

s3_cls = S3(
'eu-west-1',
'some_bucket'
)
response = {}
expected_params = {
'Bucket': s3_cls.bucket,
'Key': object_key,
}
s3_client_stubber.add_response('get_object', response, expected_params)
s3_client_stubber.activate()

assert s3_cls._does_object_exist(key=object_key)

boto3_client.assert_called_once_with('s3', region_name='eu-west-1')
s3_client_stubber.assert_no_pending_responses()


@patch('s3.boto3.client')
def test_does_object_exist_no(boto3_client):
s3_client = botocore.session.get_session().create_client('s3')
s3_client_stubber = Stubber(s3_client)
boto3_client.return_value = s3_client
object_key = "some"

s3_cls = S3(
'eu-west-1',
'some_bucket'
)
s3_client_stubber.add_client_error(
'get_object',
expected_params={'Bucket': s3_cls.bucket, 'Key': object_key},
http_status_code=404,
service_error_code='NoSuchKey',
)
s3_client_stubber.activate()

assert not s3_cls._does_object_exist(key=object_key)

boto3_client.assert_called_once_with('s3', region_name='eu-west-1')
s3_client_stubber.assert_no_pending_responses()


@patch('s3.boto3.resource')
@patch('s3.LOGGER')
def test_perform_put_object_success(logger, boto3_resource):
s3_resource = Mock()
s3_object = Mock()
s3_resource.Object.return_value = s3_object
boto3_resource.return_value = s3_resource
object_key = "some"
file_path = "some-file.json"
file_data = 'some file data'

s3_cls = S3(
'eu-west-1',
'some_bucket'
)
with patch("builtins.open", mock_open(read_data=file_data)) as mock_file:
s3_cls._perform_put_object(
key=object_key,
file_path=file_path,
)
mock_file.assert_called_with(file_path, 'rb')
s3_resource.Object.assert_called_once_with(s3_cls.bucket, object_key)
s3_object.put.assert_called_once_with(Body=mock_file.return_value)

logger.info.assert_called_once_with(
"Uploading %s as %s to S3 Bucket %s in %s",
file_path,
object_key,
s3_cls.bucket,
s3_cls.region,
)
logger.debug.assert_called_once_with(
"Upload of %s was successful.",
object_key,
)
logger.error.assert_not_called()
boto3_resource.assert_called_with('s3', region_name='eu-west-1')


@patch('s3.boto3.resource')
@patch('s3.LOGGER')
def test_perform_put_object_no_such_file(logger, boto3_resource):
s3_resource = Mock()
s3_object = Mock()
s3_resource.Object.return_value = s3_object
boto3_resource.return_value = s3_resource
object_key = "some"
file_path = "some-file.json"

s3_cls = S3(
'eu-west-1',
'some_bucket'
)
correct_error_message = "File not found exception"
with patch("builtins.open") as mock_file:
mock_file.side_effect = Exception(correct_error_message)
with raises(Exception) as excinfo:
s3_cls._perform_put_object(
key=object_key,
file_path=file_path,
)

error_message = str(excinfo.value)
assert error_message.find(correct_error_message) >= 0

mock_file.assert_called_with(file_path, 'rb')
s3_resource.Object.assert_not_called()
s3_object.put.assert_not_called()

logger.info.assert_called_once_with(
"Uploading %s as %s to S3 Bucket %s in %s",
file_path,
object_key,
s3_cls.bucket,
s3_cls.region,
)
logger.debug.assert_not_called()
logger.error.assert_called_once_with(
"Failed to upload %s",
object_key,
exc_info=True,
)
boto3_resource.assert_called_with('s3', region_name='eu-west-1')


@patch('s3.boto3.resource')
@patch('s3.LOGGER')
def test_perform_put_object_failed(logger, boto3_resource):
s3_resource = Mock()
s3_object = Mock()
s3_resource.Object.return_value = s3_object
boto3_resource.return_value = s3_resource
object_key = "some"
file_path = "some-file.json"
file_data = 'some file data'

s3_cls = S3(
'eu-west-1',
'some_bucket'
)
correct_error_message = "Test exception"
s3_object.put.side_effect = Exception(correct_error_message)
with patch("builtins.open", mock_open(read_data=file_data)) as mock_file:
with raises(Exception) as excinfo:
s3_cls._perform_put_object(
key=object_key,
file_path=file_path,
)

error_message = str(excinfo.value)
assert error_message.find(correct_error_message) >= 0

mock_file.assert_called_with(file_path, 'rb')
s3_resource.Object.assert_called_once_with(s3_cls.bucket, object_key)
s3_object.put.assert_called_once_with(Body=mock_file.return_value)

logger.info.assert_called_once_with(
"Uploading %s as %s to S3 Bucket %s in %s",
file_path,
object_key,
s3_cls.bucket,
s3_cls.region,
)
logger.debug.assert_not_called()
logger.error.assert_called_once_with(
"Failed to upload %s",
object_key,
exc_info=True,
)
boto3_resource.assert_called_with('s3', region_name='eu-west-1')