
Merge pull request #14 from jehiah/use_boto3_14
switch to boto3
jehiah authored Apr 29, 2021
2 parents af6bd4c + dbfd19c commit 055f2f3
Showing 3 changed files with 33 additions and 22 deletions.
44 changes: 26 additions & 18 deletions assetman/S3UploadThread.py
mode change 100644 → 100755
@@ -5,13 +5,11 @@
import os.path
import sys
import threading
import calendar
import datetime
import email
import Queue
import mimetypes
import logging
from boto.s3.connection import S3Connection
import boto3
from assetman.tools import make_output_path, make_absolute_static_path, make_relative_static_path, get_static_pattern, get_shard_from_list

class S3UploadThread(threading.Thread):
@@ -28,8 +26,12 @@ class S3UploadThread(threading.Thread):

def __init__(self, queue, errors, manifest, settings):
threading.Thread.__init__(self)
cx = S3Connection(settings.get('aws_access_key'), settings.get('aws_secret_key'))
self.bucket = cx.get_bucket(settings.get('s3_assets_bucket'))
self.client = boto3.client('s3',
aws_access_key_id=settings.get('aws_access_key'),
aws_secret_access_key=settings.get('aws_secret_key'))
self.bucket = boto3.resource('s3',
aws_access_key_id=settings.get('aws_access_key'),
aws_secret_access_key=settings.get('aws_secret_key')).Bucket(settings.get('s3_assets_bucket'))
self.queue = queue
self.errors = errors
self.manifest = manifest
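
The new setup above builds both a low-level boto3 client (used below for head_object checks) and a Bucket resource (used to get Object handles). A minimal standalone sketch of that pattern, assuming the same settings keys as the diff; deriving both from one shared boto3.Session is an illustration, not something this commit does:

    import boto3

    session = boto3.Session(
        aws_access_key_id=settings.get('aws_access_key'),
        aws_secret_access_key=settings.get('aws_secret_key'))
    client = session.client('s3')  # low-level API, e.g. head_object
    bucket = session.resource('s3').Bucket(settings.get('s3_assets_bucket'))  # yields Object handles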
@@ -66,24 +68,31 @@ def start_upload_file(self, file_name, file_path):
}.get(ext, 'application/octet-stream')
headers = {
'Content-Type': content_type,
'Expires': self.get_expires(),
'Cache-Control': self.get_cache_control(),
'x-amz-acl': 'public-read',
}

with open(file_path, 'rb') as f:
file_data = f.read()
# First we will upload the asset for serving via CloudFront CDN,
# so its S3 key will not have a prefix.
key = self.bucket.new_key(file_name)
key = self.bucket.Object(file_name)
self.upload_file(key, file_data, headers, for_cdn=True)

# Next we will upload the same file with a prefixed key, to be
# served by our "local CDN proxy".
key_prefix = self.settings.get('local_cdn_url_prefix').lstrip('/').rstrip('/')
key = self.bucket.new_key(key_prefix + '/' + file_name)
key = self.bucket.Object(key_prefix + '/' + file_name)
self.upload_file(key, file_data, headers, for_cdn=False)

def exists(self, obj):
# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.head_object
try:
self.client.head_object(Bucket=obj.bucket_name, Key=obj.key)
except Exception, e:
logging.error('got %s', e)
return False
return True
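
The exists() helper above treats any head_object failure as "missing". boto3 actually raises botocore.exceptions.ClientError with a 404 status for an absent key, so a narrower variant could distinguish a missing object from, say, a credentials or permissions error. A sketch of that variant (illustrative, not part of this commit):

    import botocore.exceptions

    def exists(self, obj):
        try:
            self.client.head_object(Bucket=obj.bucket_name, Key=obj.key)
        except botocore.exceptions.ClientError as e:
            # head_object reports a missing key as a bare 404 status code
            if e.response['Error']['Code'] == '404':
                return False
            raise  # surface auth/permission errors instead of treating them as "missing"
        return True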

def upload_file(self, key, file_data, headers, for_cdn):
"""Uploads the given file_data to the given S3 key. If the file is a
compiled asset (ie, JS or CSS file), any static URL references it
@@ -93,32 +102,31 @@ def upload_file(self, key, file_data, headers, for_cdn):
our CloudFront CDN domains. Otherwise, they will be updated to point
to our local CDN proxy.
"""
if not key.exists() or self.settings.get('force_s3_upload'):
if self.settings.get('force_s3_upload') or not self.exists(key):
# Do we need to do URL replacement?
if re.search(r'\.(css|js)$', key.name):
if re.search(r'\.(css|js)$', key.key):
if for_cdn:
logging.info('Rewriting URLs => CDN in %s', key.name)
logging.info('Rewriting URLs => CDN in %s', key.key)
replacement_prefix = self.settings.get('cdn_url_prefix')
else:
logging.info('Rewriting URLs => local proxy in %s', key.name)
logging.info('Rewriting URLs => local proxy in %s', key.key)
replacement_prefix = self.settings.get('local_cdn_url_prefix')
file_data = sub_static_version(
file_data,
self.manifest,
replacement_prefix,
self.settings['static_dir'],
self.settings.get('static_url_prefix'))
key.set_contents_from_string(file_data, headers, replace=self.settings.get('force_s3_upload', False))
logging.info('Uploaded %s', key.name)
key.put(Body=file_data, CacheControl=headers.get('Cache-Control'), ContentType=headers.get('Content-Type'), ACL="public-read", Expires=self.get_expires())
logging.info('Uploaded s3://%s/%s', key.bucket_name, key.key)
logging.debug('Headers: %r', headers)
else:
logging.info('Skipping upload of %s; already exists (use force_s3_upload to override)', key.name)
logging.info('Skipping upload of %s; already exists (use force_s3_upload to override)', key.key)

def get_expires(self):
# Get a properly formatted date and time, via Tornado's set_header()
dt = datetime.datetime.utcnow() + datetime.timedelta(days=365*10)
t = calendar.timegm(dt.utctimetuple())
return email.utils.formatdate(t, localtime=False, usegmt=True)
return dt


def get_cache_control(self):
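One detail worth flagging in the hunks above: boto's set_contents_from_string took a dict of preformatted HTTP headers, while boto3's Object.put() takes named parameters and serializes a datetime passed as Expires into an HTTP date itself, which is why the calendar/email formatting in get_expires() could be dropped. A sketch of the equivalent call, with illustrative values standing in for get_cache_control() and the per-extension content type:

    import datetime

    expires = datetime.datetime.utcnow() + datetime.timedelta(days=365 * 10)
    key.put(Body=file_data,
            CacheControl='max-age=86400',  # illustrative; real value comes from get_cache_control()
            ContentType='text/css',        # illustrative; chosen from the file extension
            ACL='public-read',
            Expires=expires)               # boto3 accepts a datetime directly
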
4 changes: 2 additions & 2 deletions assetman/__init__.py
@@ -1,5 +1,5 @@
from assetman.manifest import Manifest

# also update in setup.py
__version__ = "0.1.20"
version_info = (0, 1, 20)
__version__ = "0.1.21"
version_info = (0, 1, 21)
7 changes: 5 additions & 2 deletions setup.py
@@ -3,7 +3,7 @@
from distutils.core import setup

setup(name='assetman',
version='0.1.20', # also update in __init__.py
version='0.1.21', # also update in __init__.py
description='AssetMan asset manager',
url="http://github.com/bitly/assetman",
license="Apache Software License",
@@ -16,6 +16,9 @@
install_requires=['simplejson',
'multiprocessing',
],

classifiers = [
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
],
scripts=['scripts/assetman_compile']
)
