Skip to content

Commit

Permalink
Implement s3 storage
Browse files Browse the repository at this point in the history
  • Loading branch information
negasora committed May 7, 2024
1 parent b5da4d5 commit 2fc2ad9
Show file tree
Hide file tree
Showing 7 changed files with 373 additions and 245 deletions.
2 changes: 1 addition & 1 deletion Pipfile
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,11 @@ name = "pypi"
django = "~=3.2"
requests = "~=2.27"
djangorestframework = "~=3.13"
gunicorn = "~=20.1"
psycopg2 = "~=2.9"
whitenoise = {version = "~=6.0", extras = ["brotli"]}
pymemcache = "~=4.0"
django-storages = {extras = ["s3"], version = "~=1.13"}

[dev-packages]
django-debug-toolbar = "~=3.8.1"
Expand Down
564 changes: 330 additions & 234 deletions Pipfile.lock

Large diffs are not rendered by default.

16 changes: 14 additions & 2 deletions decompiler_explorer/settings/docker.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,20 +29,32 @@

# --- S3 / object-storage configuration --------------------------------------
# Storage backend is selected via environment variable; falls back to the
# value of DEFAULT_FILE_STORAGE defined earlier in this settings module.
DEFAULT_FILE_STORAGE = os.getenv('DJANGO_FILE_STORAGE', DEFAULT_FILE_STORAGE)
AWS_STORAGE_BUCKET_NAME = os.getenv('AWS_STORAGE_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = os.getenv('AWS_S3_ENDPOINT_URL')
AWS_S3_REGION_NAME = os.getenv('AWS_S3_REGION_NAME')

# The endpoint URL is only set when S3 storage is in use (see the s3 compose
# file), so its presence is the switch the serializers key off of.
USING_S3 = AWS_S3_ENDPOINT_URL is not None

# Credentials arrive as Docker secrets (mounted files), not env vars.
_s3_access_key_id_path = Path('/run/secrets/s3_access_key_id')
_s3_secret_access_key_path = Path('/run/secrets/s3_secret_access_key')

if _s3_access_key_id_path.exists() and _s3_secret_access_key_path.exists():
    # Signed query-string URLs so private objects can be downloaded directly.
    AWS_QUERYSTRING_AUTH = True
    # strip(): secret files created with `echo` carry a trailing newline,
    # which would silently corrupt the credentials sent to S3.
    AWS_S3_ACCESS_KEY_ID = _s3_access_key_id_path.read_text().strip()
    AWS_S3_SECRET_ACCESS_KEY = _s3_secret_access_key_path.read_text().strip()

# Console-only logging: INFO level when DEBUG is on, ERROR-only in production.
# (The flattened diff had left duplicate 'level' keys — old 'DEBUG' value and
# new 'INFO' value — in both dicts; the dead first entries are removed here.)
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'console': {
            'level': 'INFO' if DEBUG else 'ERROR',
            'class': 'logging.StreamHandler',
        },
    },
    'loggers': {
        'django': {
            'handlers': ['console'],
            'level': 'INFO' if DEBUG else 'ERROR',
        },
    },
}
14 changes: 11 additions & 3 deletions docker-compose.s3.yml
Original file line number Diff line number Diff line change
@@ -1,10 +1,18 @@
version: '3.8'

services:
  traefik:
    command:
      - --log.level=DEBUG
  explorer:
    environment:
      - DJANGO_FILE_STORAGE=storages.backends.s3boto3.S3Boto3Storage
      - AWS_STORAGE_BUCKET_NAME=${AWS_STORAGE_BUCKET_NAME:?S3 bucket name unset}
      - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:?S3 endpoint url unset}
      - AWS_S3_REGION_NAME=${AWS_S3_REGION_NAME:?S3 region name unset}
    secrets:
      - s3_access_key_id
      - s3_secret_access_key

secrets:
  s3_access_key_id:
    file: ./secrets/s3_access_key_id
  s3_secret_access_key:
    file: ./secrets/s3_secret_access_key
12 changes: 12 additions & 0 deletions explorer/serializers.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import hashlib
from django.conf import settings
from rest_framework import serializers
from rest_framework.reverse import reverse

Expand Down Expand Up @@ -29,6 +30,11 @@ def get_url(self, obj: Decompilation):
return reverse('decompilation-detail', args=[binary.pk, obj.pk], request=self.context['request'])

def get_download_url(self, obj: Decompilation):
    """Return a URL for fetching the decompiled output, or None if absent.

    With S3 enabled the file is served straight from object storage;
    otherwise the download is routed through the API's download view.
    """
    if settings.USING_S3:
        return obj.decompiled_file.url if obj.decompiled_file.name else None

    return reverse(
        'decompilation-download',
        args=[obj.binary.pk, obj.pk],
        request=self.context['request'],
    )

Expand Down Expand Up @@ -56,6 +62,9 @@ def create(self, validated_data):
return super().create(validated_data)

def get_download_url(self, obj):
    """Return the download URL for the uploaded binary.

    A direct object-storage URL when S3 is enabled, else the API route.
    """
    if not settings.USING_S3:
        return reverse('binary-download', args=[obj.pk], request=self.context['request'])
    return obj.file.url

def get_decompilations_url(self, obj):
Expand All @@ -72,6 +81,9 @@ class Meta:
fields = ['id', 'binary_id', 'decompiler', 'created', 'last_attempted', 'download_url', 'completion_url']

def get_download_url(self, obj):
    """Return the download URL for the binary behind this decompilation request."""
    if settings.USING_S3:
        # Served directly from object storage.
        return obj.binary.file.url

    binary_pk = obj.binary.pk
    return reverse('binary-download', args=[binary_pk], request=self.context['request'])

def get_completion_url(self, obj):
Expand Down
4 changes: 0 additions & 4 deletions explorer/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,11 +145,7 @@ def get_queryset(self):
def download(self, *args, **kwargs):
instance = self.get_object()

# TODO: This logic can probably be moved to the storage class
handle = instance.decompiled_file.open()
file_header = handle.read(2)
handle.seek(0)

filename = instance.decompiled_file.name.split('/')[-1]

response = FileResponse(handle, content_type='application/octet-stream')
Expand Down
6 changes: 5 additions & 1 deletion scripts/dce.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,9 @@
# CLI options for the `start` subcommand.  The S3-specific options are only
# mandatory when --s3 itself appears on the command line.  (The flattened diff
# had registered --s3-bucket twice — once with the stale `'--argument' in
# sys.argv` check — which argparse rejects with an ArgumentError; the stale
# duplicate is removed here.)
start_parser.add_argument('--domain', default="dce.localhost", help='Domain name of host')
start_parser.add_argument('--replicas', default=1, help='Number of replicas for the decompiler runners')
start_parser.add_argument('--s3', action='store_true', help='Use S3 for storing uploaded files')
_s3_required = '--s3' in sys.argv
start_parser.add_argument('--s3-bucket', required=_s3_required, help='Name of S3 bucket that will store uploaded files')
start_parser.add_argument('--s3-endpoint', required=_s3_required, help='S3-compatible endpoint')
start_parser.add_argument('--s3-region', required=_s3_required, help='S3 region')
start_parser.add_argument('--timeout', help='Timeout duration for runners (default: 120)')

stop_parser = subparsers.add_parser('stop')
Expand Down Expand Up @@ -151,6 +153,8 @@ def start_server(args):
if args.s3:
config_files += f' -c {S3_COMPOSE_FILE}'
env["AWS_STORAGE_BUCKET_NAME"] = args.s3_bucket
env["AWS_S3_ENDPOINT_URL"] = args.s3_endpoint
env["AWS_S3_REGION_NAME"] = args.s3_region

if args.debug:
env['DEBUG'] = '1'
Expand Down

0 comments on commit 2fc2ad9

Please sign in to comment.