I solved this problem with a custom S3 backend that overrides the upload function and uses django-storages instead of boto to save the files. Try this:
from django.conf import settings
from django.core.files.storage import default_storage

from ajaxuploader.backends.base import AbstractUploadBackend

from myapp.models import Upload  # replace with the import for your own Upload model


class S3CustomUpload(AbstractUploadBackend):
    NUM_PARALLEL_PROCESSES = 4

    def setup(self, filename):
        # Open a handle on the default storage (django-storages pointing at S3).
        self._fd = default_storage.open('uploads/materials/%s' % filename, 'wb')

    def upload_chunk(self, chunk, *args, **kwargs):
        # Write the chunk to S3; the handle is closed once in upload_complete(),
        # not here, so that multi-chunk uploads keep working.
        self._fd.write(chunk)

    def upload(self, uploaded, filename, raw_data, *args, **kwargs):
        try:
            if raw_data:
                # File was uploaded via AJAX and is streaming in.
                chunk = uploaded.read(self.BUFFER_SIZE)
                while len(chunk) > 0:
                    self.upload_chunk(chunk, *args, **kwargs)
                    chunk = uploaded.read(self.BUFFER_SIZE)
            else:
                # File was uploaded via a regular POST and is fully available.
                for chunk in uploaded.chunks():
                    self.upload_chunk(chunk, *args, **kwargs)
            return True
        except Exception:
            # Something went wrong; tell the uploader the upload failed.
            return False

    def upload_complete(self, request, filename, *args, **kwargs):
        self._fd.close()
        upload = Upload()
        upload.upload = settings.S3_URL + 'uploads/materials/' + filename
        upload.name = filename
        upload.save()
        return {'pk': upload.pk}
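For completeness, here is roughly how I hooked it up. This is just a sketch: the bucket name, URL pattern, and `S3_URL` value are illustrative placeholders, and it assumes django-storages with the old boto (not boto3) backend; `AjaxFileUploader(backend=...)` is the standard way django-ajax-uploader takes a custom backend.

# settings.py -- illustrative django-storages configuration
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
AWS_ACCESS_KEY_ID = '...'
AWS_SECRET_ACCESS_KEY = '...'
AWS_STORAGE_BUCKET_NAME = 'my-bucket'           # hypothetical bucket name
S3_URL = 'https://my-bucket.s3.amazonaws.com/'  # used by upload_complete() above

# views.py -- wire the custom backend into django-ajax-uploader
from ajaxuploader.views import AjaxFileUploader
from myapp.backends import S3CustomUpload  # hypothetical module path

import_uploader = AjaxFileUploader(backend=S3CustomUpload)

Then point your upload URL at `import_uploader` in urls.py as you would for any other view.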