Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Port Project to Python3.9 #16

Merged
merged 1 commit into from
Feb 7, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -7,3 +7,6 @@ docs/build/
#compiled output
*.pyc

.pytest_cache/

.venv/
22 changes: 11 additions & 11 deletions assetman/S3UploadThread.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
#!/bin/python
from __future__ import with_statement

import re
import os
import os.path
import sys
import threading
import datetime
import Queue
import queue as Queue
import mimetypes
import logging
import boto3
Expand Down Expand Up @@ -42,7 +42,7 @@ def run(self):
file_name, file_path = self.queue.get()
try:
self.start_upload_file(file_name, file_path)
except Exception, e:
except Exception as e:
logging.error('Error uploading %s: %s', file_name, e)
self.errors.append((sys.exc_info(), self))
finally:
Expand All @@ -52,8 +52,8 @@ def start_upload_file(self, file_name, file_path):
"""Starts the procecss of uploading a file to S3. Each file will be
uploaded twice (once for CDN and once for our local CDN proxy).
"""
assert isinstance(file_name, (str, unicode))
assert isinstance(file_path, (str, unicode))
assert isinstance(file_name, str)
assert isinstance(file_path, str)
assert os.path.isfile(file_path)

content_type, content_encoding = mimetypes.guess_type(file_name)
Expand Down Expand Up @@ -88,7 +88,7 @@ def exists(self, obj):
# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.head_object
try:
self.client.head_object(Bucket=obj.bucket_name, Key=obj.key)
except Exception, e:
except Exception as e:
logging.error('got %s', e)
return False
return True
Expand Down Expand Up @@ -143,7 +143,7 @@ def upload_assets_to_s3(manifest, settings, skip_s3_upload=False):

# We know we want to upload each asset block (these correspond to the
# assetman.include_* blocks in each template)
for depspec in manifest.blocks.itervalues():
for depspec in manifest.blocks.values():
file_name = depspec['versioned_path']
file_path = make_output_path(settings['compiled_asset_root'], file_name)
assert os.path.isfile(file_path), 'Missing compiled asset %s' % file_path
Expand All @@ -154,7 +154,7 @@ def upload_assets_to_s3(manifest, settings, skip_s3_upload=False):
# but we'll need to filter out other entries in the complete 'assets'
# block of the manifest.
should_skip = re.compile(r'\.(scss|less|css|js|html)$', re.I).search
for rel_path, depspec in manifest.assets.iteritems():
for rel_path, depspec in manifest.assets.items():
if should_skip(rel_path):
continue
file_path = make_absolute_static_path(settings['static_dir'], rel_path)
Expand All @@ -170,11 +170,11 @@ def upload_assets_to_s3(manifest, settings, skip_s3_upload=False):
# Upload assets to S3 using 5 threads
queue = Queue.Queue()
errors = []
for i in xrange(5):
for i in range(5):
uploader = S3UploadThread(queue, errors, manifest, settings)
uploader.setDaemon(True)
uploader.start()
map(queue.put, to_upload)
list(map(queue.put, to_upload))
queue.join()
if errors:
raise Exception(errors)
Expand Down Expand Up @@ -202,7 +202,7 @@ def replacer(match):
replacement_link = prefix.rstrip('/') + '/' + versioned_path.lstrip('/')
logging.info('replacing %s -> %s', path, replacement_link)
return replacement_link
logging.warn('Missing path %s in manifest, using %s', path, match.group(0))
logging.warning('Missing path %s in manifest, using %s', path, match.group(0))
return match.group(0)
pattern = get_static_pattern(static_url_prefix)
return re.sub(pattern, replacer, src)
4 changes: 2 additions & 2 deletions assetman/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from assetman.manifest import Manifest

# also update in setup.py
__version__ = "0.2.0"
version_info = (0, 2, 0)
__version__ = "0.3.0"
version_info = (0, 3, 0)
45 changes: 18 additions & 27 deletions assetman/compile.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#!/bin/python

from __future__ import with_statement


import os
import re
Expand All @@ -22,10 +22,6 @@ class NeedsCompilation(Exception):

parser = OptionParser(description='Compiles assets for AssetMan')

parser.add_option(
'--django_template_dirs', type="string", action='append',
help='Directory to crawl looking for static assets to compile.')

parser.add_option(
'--tornado_template_dirs', type="string", action='append',
help='Directory to crawl looking for static assets to compile.')
Expand Down Expand Up @@ -192,7 +188,7 @@ def iter_template_deps(static_dir, src_path, static_url_prefix):
if os.path.isfile(dep_path):
yield dep_path
else:
logging.warn('Missing dep %s (src: %s)', dep_path, src_path)
logging.warning('Missing dep %s (src: %s)', dep_path, src_path)

###############################################################################

Expand All @@ -208,9 +204,9 @@ def version_dependency(path, manifest):
if manifest.assets[path]['version']:
return manifest.assets[path]['version']
h = hashlib.md5()
h.update(get_file_hash(make_absolute_static_path(manifest.settings['static_dir'], path)))
h.update(get_file_hash(make_absolute_static_path(manifest.settings['static_dir'], path)).encode())
for dep_path in manifest.assets[path]['deps']:
h.update(version_dependency(dep_path, manifest))
h.update(version_dependency(dep_path, manifest).encode())
version = h.hexdigest()
_, ext = os.path.splitext(path)
manifest.assets[path]['version'] = version
Expand Down Expand Up @@ -298,7 +294,7 @@ def iter_static_deps(static_dir, src_path, static_url_prefix):
if os.path.isfile(dep_path):
yield dep_path
else:
logging.warn('Missing dep %s (src: %s)', dep_path, src_path)
logging.warning('Missing dep %s (src: %s)', dep_path, src_path)


def _build_manifest_helper(static_dir, src_paths, static_url_prefix, manifest):
Expand All @@ -315,25 +311,23 @@ def _build_manifest_helper(static_dir, src_paths, static_url_prefix, manifest):
_build_manifest_helper(static_dir, [dep_path], static_url_prefix, manifest)


def build_manifest(tornado_paths, django_paths, settings):
def build_manifest(tornado_paths, settings):
"""Recursively builds the dependency manifest for the given list of source
paths.
"""
assert isinstance(tornado_paths, (list, tuple))
assert isinstance(django_paths, (list, tuple))

paths = list(set(tornado_paths).union(set(django_paths)))
paths = list(set(tornado_paths))
# First, parse each template to build a list of AssetCompiler instances
path_infos = [(x, 'tornado_template') for x in tornado_paths]
path_infos += [(x, 'django_template') for x in django_paths]
compilers = build_compilers(path_infos, settings)

# Add each AssetCompiler's paths to our set of paths to search for deps
paths = set(paths)
for compiler in compilers:
new_paths = compiler.get_paths()
if settings.get('verbose'):
print compiler, new_paths
print(compiler, new_paths)
paths.update(new_paths)
paths = list(paths)

Expand Down Expand Up @@ -365,7 +359,6 @@ def _create_settings(options):
static_dir=options.static_dir,
static_url_prefix=options.static_url_path,
tornado_template_dirs=options.tornado_template_dirs,
django_template_dirs=options.django_template_dirs,
template_extension=options.template_ext,
test_needs_compile=options.test_needs_compile,
skip_s3_upload=options.skip_s3_upload,
Expand All @@ -386,7 +379,7 @@ def run(settings):
logging.info('Creating output directory: %s', settings['compiled_asset_root'])
os.makedirs(settings['compiled_asset_root'])

for d in settings['tornado_template_dirs'] + settings['django_template_dirs']:
for d in settings['tornado_template_dirs']:
if not os.path.isdir(d):
raise Exception('Template directory not found: %r', d)

Expand All @@ -395,22 +388,20 @@ def run(settings):

# Find all the templates we need to parse
tornado_paths = list(iter_template_paths(settings['tornado_template_dirs'], settings['template_extension']))
django_paths = list(iter_template_paths(settings['django_template_dirs'], settings['template_extension']))

logging.debug('found %d tornado and %d django template paths', len(tornado_paths), len(django_paths))
if not tornado_paths and not django_paths:
logging.warn("No templates found")
if not tornado_paths:
logging.warning("No templates found")

# Load the current manifest and generate a new one
cached_manifest = Manifest(settings).load()
try:
current_manifest, compilers = build_manifest(tornado_paths, django_paths, settings)
except ParseError, e:
current_manifest, compilers = build_manifest(tornado_paths, settings)
except ParseError as e:
src_path, msg = e.args
logging.error('Error parsing template %s', src_path)
logging.error(msg)
raise Exception
except DependencyError, e:
except DependencyError as e:
src_path, missing_deps = e.args
logging.error('Dependency error in source %s!', src_path)
logging.error('Missing paths: %s', missing_deps)
Expand All @@ -421,7 +412,7 @@ def run(settings):
# compiler's source path figures into the dependency tracking. But we only
# need to actually compile each block once.
logging.debug('Found %d assetman block compilers', len(compilers))
compilers = dict((c.get_hash(), c) for c in compilers).values()
compilers = list(dict((c.get_hash(), c) for c in compilers).values())
logging.debug('%d unique assetman block compilers', len(compilers))

# update the manifest on each our compilers to reflect the new manifest,
Expand All @@ -436,7 +427,7 @@ def needs_compile(compiler):
if settings['force_recompile']:
to_compile = compilers
else:
to_compile = filter(needs_compile, compilers)
to_compile = list(filter(needs_compile, compilers))

if to_compile or cached_manifest.needs_recompile(current_manifest):
# If we're only testing whether a compile is needed, we're done
Expand All @@ -447,8 +438,8 @@ def needs_compile(compiler):
try:
# See note above about bug in pool.map w/r/t KeyboardInterrupt.
_compile_worker = CompileWorker(settings.get('skip_inline_images', False), current_manifest)
pool.map_async(_compile_worker, to_compile).get(1e100)
except CompileError, e:
pool.map_async(_compile_worker, to_compile).get(1e9) # previously set to 1e100 which caused overflow of C _PyTime_t
except CompileError as e:
cmd, msg = e.args
logging.error('Compile error!')
logging.error('Command: %s', ' '.join(cmd))
Expand Down
21 changes: 11 additions & 10 deletions assetman/compilers.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from __future__ import absolute_import, with_statement


import base64
from collections import defaultdict
Expand All @@ -24,13 +24,14 @@ def run_proc(cmd, stdin=None):
popen_args = dict(stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if stdin is not None:
popen_args['stdin'] = subprocess.PIPE
stdin = stdin.encode()
proc = subprocess.Popen(cmd, **popen_args)
out, err = proc.communicate(input=stdin)
if proc.returncode != 0:
raise CompileError(cmd, err)
elif err:
logging.warn('%s stderr:\n%s', cmd[0], err)
return out
logging.warning('%s stderr:\n%s', cmd[0], err)
return out.decode()

class CompileError(Exception):
"""Error encountered while compiling assets."""
Expand Down Expand Up @@ -88,16 +89,16 @@ def needs_compile(self, cached_manifest, current_manifest):
if cached_manifest.blocks[name_hash]['version'] == content_hash:
compiled_path = self.get_compiled_path()
if not os.path.exists(compiled_path):
logging.warn('Missing compiled asset %s from %s',
logging.warning('Missing compiled asset %s from %s',
compiled_path, self)
return True
return False
else:
logging.warn('Contents of %s changed', self)
logging.warning('Contents of %s changed', self)
else:
compiled_path = self.get_compiled_path()
if not os.path.exists(compiled_path):
logging.warn('New/unknown hash %s from %s', name_hash, self)
logging.warning('New/unknown hash %s from %s', name_hash, self)
else:
logging.info('new hash %s from %s but already exists on file %s', name_hash, self, compiled_path)
return False
Expand All @@ -109,12 +110,12 @@ def get_current_content_hash(self, manifest):
for path in self.get_paths():
relative_path = make_relative_static_path(self.settings['static_dir'], path)
assert relative_path in manifest.assets, relative_path
h.update(manifest.assets[relative_path]['version'])
h.update(manifest.assets[relative_path]['version'].encode())
return h.hexdigest()

def get_paths(self):
"""Returns a list of absolute paths to the assets contained in this manager."""
paths = map(functools.partial(make_absolute_static_path, self.settings['static_dir']), self.rel_urls)
paths = list(map(functools.partial(make_absolute_static_path, self.settings['static_dir']), self.rel_urls))
try:
assert all(map(os.path.isfile, paths))
except AssertionError:
Expand Down Expand Up @@ -222,9 +223,9 @@ def replacer(match):

result = re.sub(pattern, replacer, css_src)

for url, count in seen_assets.iteritems():
for url, count in seen_assets.items():
if count > 1:
logging.warn('inline asset duplicated %dx: %s', count, url)
logging.warning('inline asset duplicated %dx: %s', count, url)

return result

Expand Down
1 change: 0 additions & 1 deletion assetman/django_assetman/__init__.py

This file was deleted.

3 changes: 0 additions & 3 deletions assetman/django_assetman/models.py

This file was deleted.

1 change: 0 additions & 1 deletion assetman/django_assetman/templatetags/__init__.py

This file was deleted.

66 changes: 0 additions & 66 deletions assetman/django_assetman/templatetags/assetman_tags.py

This file was deleted.

Loading
Loading