I am trying to upload a video file to S3, but only after putting the upload into a task queue with Celery, so that the user can do other things while the video uploads.
My views.py that calls the Celery task:
def upload_blob(request, iterator, interview_id, candidate_id, question_id):
    try:
        interview_obj = Interview.objects.get(id=interview_id)
    except ObjectDoesNotExist:
        interview_obj = None
    current_interview = interview_obj
    if request.method == 'POST':
        print("inside POST")
        # Direct save, which works but blocks the request:
        # newdoc1 = Document(upload=request.FILES['uploaded_video'], name="videos/interview_" + interview_id + "_candidate_" + candidate_id + "_question_" + question_id)
        # newdoc1.save()
        save_document_model.delay(request.FILES['uploaded_video'], "videos/interview_" + interview_id + "_candidate_" + candidate_id + "_question_" + question_id)
        # newdoc2 = Document(upload=request.FILES['uploaded_audio'], name="audios/interview_" + interview_id + "_candidate_" + candidate_id + "_question_" + question_id)
        # newdoc2.save()
        save_document_model.delay(request.FILES['uploaded_audio'], "audios/interview_" + interview_id + "_candidate_" + candidate_id + "_question_" + question_id)
        iterator = str(int(iterator) + 1)
        return HttpResponseRedirect(reverse('candidate:show_question', kwargs={'iterator': iterator, 'interview_id': current_interview.id, 'question_id': question_id}))
    else:
        return render(request, 'candidate/record_answer.html')
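Note that the arguments to .delay() travel to the broker as a JSON message, and request.FILES['uploaded_video'] is an InMemoryUploadedFile, which the JSON encoder cannot handle. A minimal sketch of one workaround, assuming the web server and the Celery worker share a filesystem (dispatch_upload is a hypothetical helper, not part of my code): spool the upload to a temporary file in the view and pass only strings to the task.

import tempfile

def dispatch_upload(uploaded_file, file_name):
    # delete=False so the temp file outlives the request and the worker
    # process can open it later.
    with tempfile.NamedTemporaryFile(delete=False) as tmp:
        for chunk in uploaded_file.chunks():
            tmp.write(chunk)
    # Both arguments are now plain strings, which JSON can serialize.
    save_document_model.delay(tmp.name, file_name)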
The actual Celery tasks.py:
@task(name="save_document_model")
def save_document_model(uploaded_file, file_name):
    newdoc = Document(upload=uploaded_file, name=file_name)
    newdoc.save()
    logger.info("document saved successfully")
    # The task result also goes through the JSON serializer, so return a
    # plain string instead of an HttpResponse object.
    return "document saved successfully"
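If the view spooled the upload to a temp file as sketched above, the task would need a matching change (same shared-filesystem assumption): open the path and push the content through the FileField, so the S3 write still happens in the worker.

import os
from django.core.files import File

@task(name="save_document_model")
def save_document_model(tmp_path, file_name):
    with open(tmp_path, 'rb') as f:
        newdoc = Document(name=file_name)
        # FieldFile.save() sends the content through the configured storage
        # backend (S3 here) and then saves the model row.
        newdoc.upload.save(file_name, File(f), save=True)
    os.remove(tmp_path)  # remove the spooled copy once uploaded
    return "document saved successfully"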
The Document model:
def upload_function(instance, filename):
    getname = instance.name
    customlocation = os.path.join(settings.AWS_S3_CUSTOM_DOMAIN, settings.MEDIAFILES_LOCATION, getname)
    # Add other filename logic here
    return getname  # Return the end filename where you want it saved.
class Document(models.Model):
    # Generated names like "videos/interview_1_candidate_2_question_3" are
    # longer than 25 characters, so the field needs more room.
    name = models.CharField(max_length=255)
    uploaded_at = models.DateTimeField(auto_now_add=True)
    upload = models.FileField(upload_to=upload_function)
settings.py:
AWS_ACCESS_KEY_ID = '**********************'
AWS_SECRET_ACCESS_KEY = '**************************'
AWS_STORAGE_BUCKET_NAME = '*********'
AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
AWS_S3_OBJECT_PARAMETERS = {
    'CacheControl': 'max-age=86400',
}
AWS_LOCATION = 'static'
AWS_DEFAULT_ACL = None
MEDIAFILES_LOCATION = 'uploads/'
DEFAULT_FILE_STORAGE = 'watsonproj.storage_backends.MediaStorage'
# CELERY STUFF
BROKER_URL = 'redis://localhost:6379'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TIMEZONE = 'Africa/Nairobi'
CELERY_IMPORTS = ("candidate.tasks",)
Direct upload works without Celery, but with Celery I get this error:
Object of type 'InMemoryUploadedFile' is not JSON serializable
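That error is consistent with CELERY_TASK_SERIALIZER = 'json': every task argument is run through the stdlib JSON encoder, which only understands primitives, lists, and dicts. It can be reproduced inside the view without Celery at all:

import json

json.dumps("videos/interview_1_candidate_2_question_3")  # fine: a plain string
json.dumps(request.FILES['uploaded_video'])  # TypeError: Object of type
                                             # 'InMemoryUploadedFile' is not JSON serializable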