0

I'm trying to implement a dropzone.js interface to upload files to Amazon S3. I followed a couple of examples I found online.

<html>
<head>
    <title>S3 Dropzone</title>
    <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
    <link rel="stylesheet" href="https://rawgit.com/enyo/dropzone/master/dist/dropzone.css">
    <script src="https://rawgit.com/enyo/dropzone/master/dist/dropzone.js"></script>
    <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.4.1/jquery.min.js"></script>
    <script src="https://sdk.amazonaws.com/js/aws-sdk-2.476.0.min.js"></script>
    <script language="JavaScript">
        var bucket = "test-bucket";
        var region = "us-east-2";
        var userId = "11111";

        // Fetch the S3 bucket/region/credentials from the server once the DOM
        // is ready, then configure the AWS SDK with them.
        $(document).ready(function($) {
            $.ajax({
                url: "/s3_credentials.php",
                data: {},
                type: 'POST',
                dataType: 'json',
                success: function(response) {
                    bucket = response.Bucket;
                    region = response.region;
                    // FIX: the region must be set on the SDK's global config.
                    // Storing it only in the local `region` variable leaves the
                    // SDK at its default (us-east-1), which is exactly the
                    // AuthorizationHeaderMalformed error seen below.
                    AWS.config.update({ region: response.region });
                    AWS.config.credentials = new AWS.Credentials({
                        accessKeyId: response.accessKeyId,
                        secretAccessKey: response.secretAccessKey
                    });
                    console.log("credentials response", response, AWS.config.credentials);
                },
                // Surface a failed credentials fetch instead of silently
                // leaving the SDK unconfigured.
                error: function(xhr, textStatus, err) {
                    console.error("failed to fetch S3 credentials", textStatus, err);
                }
            });
        });
        // let aws-sdk send events to dropzone.
        // Bridge aws-sdk ManagedUpload events into Dropzone's event stream so
        // the UI shows progress/completion for S3-bound uploads.
        function sendEvents(file) {
            const onProgress = function(evt) {
                dz.emit('uploadprogress', file, evt.loaded * 100 / evt.total, evt.loaded);
            };
            file.s3upload.on('httpUploadProgress', onProgress);
            file.s3upload.send(function(err) {
                if (err) {
                    dz.emit('error', file, err);
                } else {
                    dz.emit('complete', file);
                }
            });
        }

        // Replace Dropzone's own XHR upload with the S3 ManagedUpload path.
        Dropzone.prototype.uploadFiles = (files) => files.map((file) => sendEvents(file));
    </script>

</head>

<body>

<br><br><br>

<h1 style="text-align: center">ImageUpload to S3Bucket</h1>
<div id="dz" class="dz-clickable dropzone"></div>

<script language="JavaScript">
    Dropzone.autoDiscover = false;
    let canceled = file => { if (file.s3upload) file.s3upload.abort() }
    let options = {
        url: '/',
        canceled,
        removedfile: canceled,
        accept(file, done) {
            let params = {
                Bucket: bucket,
                Key: userId + "/" + file.name,
                Body: file
            };
            params.accessKeyId = AWS.config.credentials.accessKeyId;
            params.secretAccessKey = AWS.config.credentials.secretAccessKey;
            console.log('upload params', params);
            file.s3upload = new AWS.S3.ManagedUpload({params});
            done();
        }
    };
    var dz = new Dropzone('#dz', options );
</script>
</body>
</html>

The credentials script is straightforward:

<?php
// s3_credentials.php — returns the S3 connection settings as JSON for the
// browser-side Dropzone uploader.
//
// SECURITY NOTE(review): this hands long-term IAM access keys to every
// visitor's browser. Prefer short-lived STS credentials (e.g. via Cognito)
// or server-generated pre-signed URLs before shipping this.
$AWSBucket = "dropzone-bucket-test";
$AWSRegion = "us-east-2";
$AWSUserArn = "arn:aws:iam::22222222222:user/test-bucket";
$AWSUserAccessKeyId = "*****************";
$AWSUserSecretAccessKey = "**********************************";

// Emit the settings under the key names the client-side script expects
// (Bucket / accessKeyId / secretAccessKey / region / userArn).
header('Content-Type: application/json');
echo json_encode([
    'Bucket'          => $AWSBucket,
    'accessKeyId'     => $AWSUserAccessKeyId,
    'secretAccessKey' => $AWSUserSecretAccessKey,
    'region'          => $AWSRegion,
    'userArn'         => $AWSUserArn
]);

The response coming back for the 3rd+ PUT calls seems to say that the region is somehow falling back to a default of us-east-1 somewhere. None of my code includes that region info but I see it in some of the AWS sdk as the default.

<?xml version="1.0" encoding="UTF-8"?>
<Error>
    <Code>AuthorizationHeaderMalformed</Code>
    <Message>The authorization header is malformed; the region 'us-east-1' is wrong; expecting 'us-east-2'</Message>
    <Region>us-east-2</Region>
    <RequestId>********************</RequestId>
    <HostId>***********************************************************************=</HostId>
</Error>

It will upload one or two files with no problem. After that, it starts failing with 400 errors. Examining the responses, the error says the region is wrong, but I'm not sure how it got that way. I'm assuming it's falling back to a default value in the aws-sdk, but I'm not sure why.

I even tried initializing my variables with the correct values before doing the credentials call to get the configured values from the server.

Any help is appreciated.

smac2020
  • 9,637
  • 4
  • 24
  • 38
Scott
  • 7,983
  • 2
  • 26
  • 41

1 Answer

0

I am not sure why your code displays this behavior, but one thing that seems strange to me is the structure of the first script. You are fetching credentials in the $(document).ready function via an asynchronous AJAX call, but the code responsible for letting the aws-sdk send events to Dropzone sits outside of the AJAX success handler — and even outside of the $(document).ready function — which means it executes before the AWS SDK has been configured with those credentials.

I would suggest placing the mentioned code into AJAX's success handler.

<script language="JavaScript">
    var bucket = "test-bucket";
    var region = "us-east-2";
    var userId = "11111";

    function sendEvents(file) {
        let progress = i => dz.emit('uploadprogress', file, i.loaded * 100 / i.total, i.loaded);
        file.s3upload.on('httpUploadProgress', progress);
        file.s3upload.send(err => err ? dz.emit('error', file, err) : dz.emit('complete', file));
    }

    $(document).ready(function($) {
        $.ajax({
            url: "/s3_credentials.php",
            data: {},
            type: 'POST',
            dataType: 'json',
            success: function(response) {
                bucket = response.Bucket;
                region = response.region;
                AWS.config.credentials = new AWS.Credentials({
                    accessKeyId:response.accessKeyId,
                    secretAccessKey:response.secretAccessKey
                });
                console.log("credentials response", response, AWS.config.credentials);

                // let aws-sdk send events to dropzone.
                Dropzone.prototype.uploadFiles = files => files.map(sendEvents);
            }
        })
    });
</script>
Matus Dubrava
  • 13,637
  • 2
  • 38
  • 54
  • thanks for the tip - in my various versions I instantiate a separate variable s3 in one of them and set the values in the specific object. That's just a temporary testing way of accessing aws. Eventually that will all be replaced with openId per-user credentials. I think the 'managedUpload' may be part of my problem with the queue and if I also switch to signed urls, I might solve this issue. In the meantime, I just set the parallelUploads high. – Scott Jul 17 '19 at 22:01