Skip to content
This repository was archived by the owner on Apr 21, 2021. It is now read-only.

Commit 1a4944c

Browse files
committed
Fix for downloading large files
1 parent f4ae85c commit 1a4944c

2 files changed

Lines changed: 23 additions & 27 deletions

File tree

ckanext/s3filestore/controller.py

Lines changed: 12 additions & 21 deletions
Original file line number | Diff line number | Diff line change
@@ -8,10 +8,11 @@
88
import ckan.lib.base as base
99
import ckan.model as model
1010
import ckan.lib.uploader as uploader
11-
from ckan.common import _, request, c, response
11+
from ckan.common import _, request, c, response, streaming_response
1212
from botocore.exceptions import ClientError
1313

1414
from ckanext.s3filestore.uploader import S3Uploader
15+
import webob
1516

1617
import logging
1718
log = logging.getLogger(__name__)
@@ -45,6 +46,7 @@ def resource_download(self, id, resource_id, filename=None):
4546
upload = uploader.get_resource_uploader(rsc)
4647
bucket_name = config.get('ckanext.s3filestore.aws_bucket_name')
4748
region = config.get('ckanext.s3filestore.region_name')
49+
host_name = config.get('ckanext.s3filestore.host_name')
4850
bucket = upload.get_s3_bucket(bucket_name)
4951

5052
if filename is None:
@@ -57,8 +59,15 @@ def resource_download(self, id, resource_id, filename=None):
5759
.format(key_path, bucket_name))
5860

5961
try:
60-
obj = bucket.Object(key_path)
61-
contents = str(obj.get()['Body'].read())
62+
# Small workaround to manage downloading of large files
63+
# We are using redirect to minio's resource public URL
64+
s3 = upload.get_s3_session()
65+
client = s3.client(service_name='s3', endpoint_url=host_name)
66+
url = client.generate_presigned_url(ClientMethod='get_object',
67+
Params={'Bucket': bucket.name,
68+
'Key': key_path})
69+
redirect(url)
70+
6271
except ClientError as ex:
6372
if ex.response['Error']['Code'] == 'NoSuchKey':
6473
# attempt fallback
@@ -79,24 +88,6 @@ def resource_download(self, id, resource_id, filename=None):
7988
else:
8089
raise ex
8190

82-
dataapp = paste.fileapp.DataApp(contents)
83-
84-
try:
85-
status, headers, app_iter = request.call_application(dataapp)
86-
except OSError:
87-
abort(404, _('Resource data not found'))
88-
89-
response.headers.update(dict(headers))
90-
response.status = status
91-
content_type, x = mimetypes.guess_type(rsc.get('url', ''))
92-
if content_type:
93-
response.headers['Content-Type'] = content_type
94-
return app_iter
95-
96-
elif 'url' not in rsc:
97-
abort(404, _('No download is available'))
98-
redirect(str(rsc['url']))
99-
10091
def filesystem_resource_download(self, id, resource_id, filename=None):
10192
"""
10293
A fallback controller action to download resources from the

ckanext/s3filestore/uploader.py

Lines changed: 11 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -39,23 +39,28 @@ def get_directory(self, id, storage_path):
3939
directory = os.path.join(storage_path, id)
4040
return directory
4141

42+
def get_s3_session(self):
43+
return boto3.session.Session(aws_access_key_id=self.p_key,
44+
aws_secret_access_key=self.s_key,
45+
region_name=self.region)
46+
4247
def get_s3_bucket(self, bucket_name):
4348
'''Return a boto bucket, creating it if it doesn't exist.'''
4449

4550
# make s3 connection using boto3
46-
session = boto3.session.Session(aws_access_key_id=self.p_key,
47-
aws_secret_access_key=self.s_key,
48-
region_name=self.region)
49-
s3 = session.resource('s3', endpoint_url=self.host_name,
50-
config=botocore.client.Config(signature_version=self.signature))
51+
52+
s3 = self.get_s3_session().resource('s3', endpoint_url=self.host_name,
53+
config=botocore.client.Config(
54+
signature_version=self.signature))
5155
bucket = s3.Bucket(bucket_name)
5256
try:
5357
if s3.Bucket(bucket.name) in s3.buckets.all():
5458
log.info('Bucket {0} found!'.format(bucket_name))
5559

5660
else:
5761
log.warning(
58-
'Bucket {0} could not be found, attempting to create it...'.format(bucket_name))
62+
'Bucket {0} could not be found,\
63+
attempting to create it...'.format(bucket_name))
5964
try:
6065
bucket = s3.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={
6166
'LocationConstraint': 'us-east-1'})

0 commit comments

Comments (0)