Commit

add conditional mimetype for S3 file upload
cyrilmanuel authored and sarsurgithub committed Nov 14, 2024
1 parent 6dd6d71 commit b2028eb
Showing 1 changed file with 34 additions and 1 deletion.
35 changes: 34 additions & 1 deletion attachment_s3/models/ir_attachment.py
@@ -9,6 +9,7 @@
 from urllib.parse import urlsplit
 
 from odoo import _, api, exceptions, models
+from odoo.addons.base.models.ir_attachment import IrAttachment as BaseIrAttachment
 from ..s3uri import S3Uri
 
 _logger = logging.getLogger(__name__)
@@ -178,9 +179,38 @@ def _store_file_read(self, fname, bin_size=False):
         else:
             return super(IrAttachment, self)._store_file_read(fname, bin_size)
 
+    def _inverse_datas(self):
+        location = self._storage()
+        for attach in self:
+            # compute the fields that depend on datas
+            value = attach.datas
+            bin_data = base64.b64decode(value) if value else b''
+            vals = {
+                'file_size': len(bin_data),
+                'checksum': self._compute_checksum(bin_data),
+                'index_content': self._index(
+                    bin_data, attach.datas_fname,
+                    attach.mimetype),
+                'store_fname': False,
+                'db_datas': value,
+            }
+            if value and location != 'db':
+                # save it to the filestore
+                self = self.with_context(mimetype=attach.mimetype)
+                vals['store_fname'] = self._file_write(value, vals['checksum'])
+                vals['db_datas'] = False
+
+            # take current location in filestore to possibly garbage-collect it
+            fname = attach.store_fname
+            # write as superuser, as user probably does not have write access
+            super(BaseIrAttachment, attach.sudo()).write(vals)
+            if fname:
+                self._file_delete(fname)
+
     @api.model
     def _store_file_write(self, key, bin_data):
         location = self.env.context.get('storage_location') or self._storage()
+        mimetype = self.env.context.get('mimetype')
         if location == 's3':
             bucket = self._get_s3_bucket()
             obj = bucket.Object(key=key)
@@ -189,7 +219,10 @@ def _store_file_write(self, key, bin_data):
             file.seek(0)
             filename = 's3://%s/%s' % (bucket.name, key)
             try:
-                obj.upload_fileobj(file)
+                if mimetype:
+                    obj.upload_fileobj(file, ExtraArgs={'ContentType': mimetype})
+                else:
+                    obj.upload_fileobj(file)
             except ClientError as error:
                 # log verbose error from s3, return short message for user
                 _logger.exception(
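For context on how the two changes fit together: the `_inverse_datas` override puts the attachment's mimetype into the context before calling `_file_write`, and `_store_file_write` reads it back and forwards it to boto3 as `ExtraArgs={'ContentType': ...}`, so S3 stores the object with the attachment's real content type instead of the default `binary/octet-stream`. The snippet below is a minimal standalone sketch of that boto3 call, not part of the commit; the helper name, bucket and key are placeholders.

# Minimal sketch (not from the module): upload bytes to S3 and tag the object
# with a Content-Type when one is known, mirroring the conditional added in
# _store_file_write. Bucket/key values are placeholders.
from io import BytesIO

import boto3


def upload_with_mimetype(bucket_name, key, bin_data, mimetype=None):
    bucket = boto3.resource('s3').Bucket(bucket_name)
    obj = bucket.Object(key=key)
    file = BytesIO(bin_data)
    if mimetype:
        # ContentType is one of the ExtraArgs accepted by upload_fileobj
        obj.upload_fileobj(file, ExtraArgs={'ContentType': mimetype})
    else:
        obj.upload_fileobj(file)
    return obj


# Usage (assumes AWS credentials and an existing bucket):
#   obj = upload_with_mimetype('my-bucket', 'abc123', b'%PDF-1.4 ...',
#                              mimetype='application/pdf')
#   obj.content_type  -> 'application/pdf' rather than 'binary/octet-stream'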
