add conditional mimetype when uploading a file to the S3 bucket #404

Closed
35 changes: 34 additions & 1 deletion attachment_s3/models/ir_attachment.py
@@ -9,6 +9,7 @@
from urllib.parse import urlsplit

from odoo import _, api, exceptions, models
from odoo.addons.base.models.ir_attachment import IrAttachment as BaseIrAttachment
from ..s3uri import S3Uri

_logger = logging.getLogger(__name__)
@@ -178,9 +179,38 @@ def _store_file_read(self, fname, bin_size=False):
else:
return super(IrAttachment, self)._store_file_read(fname, bin_size)

def _inverse_datas(self):
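        # Override of the base _inverse_datas: the attachment mimetype is
        # pushed into the context so that _store_file_write can set the
        # matching ContentType on the S3 object.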
location = self._storage()
for attach in self:
# compute the fields that depend on datas
value = attach.datas
bin_data = base64.b64decode(value) if value else b''
vals = {
'file_size': len(bin_data),
'checksum': self._compute_checksum(bin_data),
'index_content': self._index(
bin_data, attach.datas_fname,
attach.mimetype),
'store_fname': False,
'db_datas': value,
}
if value and location != 'db':
# save it to the filestore
self = self.with_context(mimetype=attach.mimetype)
vals['store_fname'] = self._file_write(value, vals['checksum'])
vals['db_datas'] = False

# take current location in filestore to possibly garbage-collect it
fname = attach.store_fname
# write as superuser, as user probably does not have write access
super(BaseIrAttachment, attach.sudo()).write(vals)
if fname:
self._file_delete(fname)

@api.model
def _store_file_write(self, key, bin_data):
location = self.env.context.get('storage_location') or self._storage()
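        # mimetype forwarded by _inverse_datas through the context (may be absent)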
mimetype = self.env.context.get('mimetype')
if location == 's3':
bucket = self._get_s3_bucket()
obj = bucket.Object(key=key)
@@ -189,7 +219,10 @@ def _store_file_write(self, key, bin_data):
file.seek(0)
filename = 's3://%s/%s' % (bucket.name, key)
try:
obj.upload_fileobj(file)
if mimetype:
obj.upload_fileobj(file, ExtraArgs={'ContentType': mimetype})
else:
obj.upload_fileobj(file)
except ClientError as error:
# log verbose error from s3, return short message for user
_logger.exception(
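For reference, a minimal standalone sketch of the conditional upload this PR adds, assuming a plain boto3 S3 resource; the bucket name, key, payload and mimetype below are made-up example values, not taken from the module:

```python
import io

import boto3

# Hypothetical example values, not taken from this PR.
bucket_name = "my-odoo-filestore"
key = "ab12cd34"                    # S3 object key
bin_data = b"%PDF-1.4 ..."          # raw attachment bytes
mimetype = "application/pdf"        # e.g. ir.attachment mimetype

bucket = boto3.resource("s3").Bucket(bucket_name)
obj = bucket.Object(key=key)

with io.BytesIO(bin_data) as file:
    if mimetype:
        # Store the Content-Type on the S3 object so it is served correctly later.
        obj.upload_fileobj(file, ExtraArgs={"ContentType": mimetype})
    else:
        obj.upload_fileobj(file)
```

The `ExtraArgs={"ContentType": ...}` form is standard boto3; without it, S3 stores the object with the default `binary/octet-stream` content type.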