80

I am getting an access denied error from the AWS S3 service in my Lambda function.

This is the code:

// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true }); // Enable ImageMagick integration.

exports.handler = function(event, context) {
    var srcBucket = event.Records[0].s3.bucket.name;
    // Object key may have spaces or unicode non-ASCII characters.
    var key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
/*
{
    originalFilename: <string>,
    versions: [
        {
            size: <number>,
            crop: [x,y],
            max: [x, y],
            rotate: <number>
        }
    ]
}*/
    var fileInfo;
    var dstBucket = "xmovo.transformedimages.develop";
    try {
        //TODO: Decompress and decode the returned value
        fileInfo = JSON.parse(key);
        //download s3File

        // get reference to S3 client
        var s3 = new AWS.S3();

        // Download the image from S3 into a buffer.
        s3.getObject({
                Bucket: srcBucket,
                Key: key
            },
            function (err, response) {
                if (err) {
                    console.log("Error getting from s3: >>> " + err + "::: Bucket-Key >>>" + srcBucket + "-" + key + ":::Principal>>>" + event.Records[0].userIdentity.principalId, err.stack);
                    return;
                }

                // Infer the image type.
                var img = gm(response.Body);
                var imageType = null;
                img.identify(function (err, data) {
                    if (err) {
                        console.log("Error image type: >>> " + err);
                        deleteFromS3(srcBucket, key);
                        return;
                    }
                    imageType = data.format;

                    //foreach of the versions requested
                    async.each(fileInfo.versions, function (currentVersion, callback) {
                        //apply transform
                        async.waterfall([async.apply(transform, response, currentVersion), uploadToS3, callback]);

                    }, function (err) {
                        if (err) console.log("Error on excecution of watefall: >>> " + err);
                        else {
                            //when all done then delete the original image from srcBucket
                            deleteFromS3(srcBucket, key);
                        }
                    });
                });
            });
    }
    catch (ex){
        context.fail("exception through: " + ex);
        deleteFromS3(srcBucket, key);
        return;
    }
        function transform(response, version, callback){
            var imageProcess = gm(response.Body);
            if (version.rotate!=0) imageProcess = imageProcess.rotate("black",version.rotate);
            if(version.size!=null) {
                if (version.crop != null) {
                    //crop the image from the coordinates
                    imageProcess=imageProcess.crop(version.size[0], version.size[1], version.crop[0], version.crop[1]);
                }
                else {
                    //find the bigger and resize proportioned the other dimension
                    var widthIsMax = version.size[0]>version.size[1];
                    var maxValue = Math.max(version.size[0],version.size[1]);
                    imageProcess=(widthIsMax)?imageProcess.resize(maxValue):imageProcess.resize(null, maxValue);
                }
            }


            //finally convert the image to jpg 90%
            imageProcess.toBuffer("jpg",{quality:90}, function(err, buffer){
                if (err) return callback(err);
                callback(null, version, "image/jpeg", buffer);
            });

        }

        function deleteFromS3(bucket, filename){
            // Pass a callback so the SDK actually sends the request (without one it only builds it).
            s3.deleteObject({
                Bucket: bucket,
                Key: filename
            }, function (err) {
                if (err) console.log("Error deleting from s3: >>> " + err);
            });
        }

        function uploadToS3(version, contentType, data, callback) {
            // Stream the transformed image to a different S3 bucket.
            var dstKey = fileInfo.originalFilename + "_" + version.size + ".jpg";
            s3.putObject({
                Bucket: dstBucket,
                Key: dstKey,
                Body: data,
                ContentType: contentType
            }, callback);
        }
};

This is the error on Cloudwatch:

AccessDenied: Access Denied

This is the stack error:

at Request.extractError (/var/runtime/node_modules/aws-sdk/lib/services/s3.js:329:35)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:105:20)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:77:10)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/request.js:596:14)
at Request.transition (/var/runtime/node_modules/aws-sdk/lib/request.js:21:10)
at AcceptorStateMachine.runTo (/var/runtime/node_modules/aws-sdk/lib/state_machine.js:14:12)
at /var/runtime/node_modules/aws-sdk/lib/state_machine.js:26:10
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:37:9)
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:598:12)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:115:18)

There is no other description or info. The S3 bucket permissions allow everyone to put, list, and delete.

What can I do to access the S3 bucket?

PS: In the Lambda event properties, the principal is correct and has administrative privileges.

caffreyd
  • 1,151
  • 1
  • 17
  • 25
cyberdantes
  • 1,342
  • 3
  • 16
  • 28
  • Possible duplicate of [Access denied on aws lambda function when getObject from S3 bucket](http://stackoverflow.com/questions/35605622/access-denied-on-aws-lambda-function-when-getobject-from-s3-bucket) – iuri Feb 25 '16 at 13:50

16 Answers

72

Interestingly enough, AWS returns 403 (access denied) when the file does not exist. Be sure the target file is in the S3 bucket.

From the comment by @Trajan below, I think it's worth including in the answer itself:

It actually depends on the permissions you have (see the doc): if you have the s3:ListBucket permission on the bucket, Amazon S3 will return an HTTP status code 404 ("no such key") error; if you don't have the s3:ListBucket permission, Amazon S3 will return an HTTP status code 403 ("access denied") error.
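If it helps, here is a minimal policy-statement sketch (the bucket name is a placeholder) that grants s3:ListBucket on the bucket, so that a request for a missing key should surface as a 404 rather than a 403:

{
    "Effect": "Allow",
    "Action": "s3:ListBucket",
    "Resource": "arn:aws:s3:::your-bucket-name"
}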

Tauseef
  • 2,035
  • 18
  • 17
vedat
  • 1,193
  • 9
  • 10
  • 3
    FWIW, In my case the missing file was caused by the URL encoding of keys in S3 events. – Typhlosaurus Jun 20 '17 at 19:18
  • 3
    This. I spent an hour trying to figure out the reason for my "Access Denied" errors. Then checked my S3 bucket, and lo and behold, the object_key doesn't exist. – urover Jan 23 '18 at 18:36
  • 1
    This was a useful answer because i thought it had to do with permissions, where in fact it was "only" a url encoding issue with german umlauts. For gods sake, why isnt this exposed as 404 ? – Logemann Apr 22 '18 at 11:23
  • Or if the bucket doesn't exist – AndyOS Jul 11 '18 at 12:11
  • 9
    @Marc object not found results in 403 (access denied) rather than 404 (not found) because different return codes would provide an attacker with useful information - it leaks information that an object of a given name actually exists. A simple dictionary-style attack could then enumerate all of the objects in someone's bucket. For a similar reason, a login page should never emit "Invalid user" and "Invalid password" for the two authentication failure scenarios; it should always emit "Invalid credentials". – jarmod Nov 10 '18 at 17:58
  • 18
    It actually depends on the permission you have (see [the doc](https://docs.aws.amazon.com/fr_fr/AmazonS3/latest/API/RESTObjectGET.html)) - If you have the `s3:ListBucket` permission on the bucket, Amazon S3 will return an HTTP status code 404 ("no such key") error. - if you don’t have the `s3:ListBucket` permission, Amazon S3 will return an HTTP status code 403 ("access denied") error. – Trajan Mar 08 '19 at 11:11
  • @Trajan Your link is dead. Here is the updated one. https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetObject.html – jellycsc Jul 05 '20 at 15:25
  • What @Tarjan said - I would recommend to always add Listbucket so that you can debug 403/404 issues – Glenn Bech Aug 26 '20 at 03:44
  • This whole thread should be the approved comment! Including the note from @Trajan – Edgar Zagórski Oct 18 '20 at 19:12
70

If you are specifying the Resource, don't forget to add the sub-folder specification as well, like this:

"Resource": [
  "arn:aws:s3:::BUCKET-NAME",
  "arn:aws:s3:::BUCKET-NAME/*"
]
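
As a rough sketch of why both ARNs are needed (BUCKET-NAME is a placeholder): bucket-level actions such as s3:ListBucket match the bare bucket ARN, while object-level actions such as s3:GetObject and s3:PutObject only match the /* resource:

{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": ["s3:ListBucket"],
            "Resource": "arn:aws:s3:::BUCKET-NAME"
        },
        {
            "Effect": "Allow",
            "Action": ["s3:GetObject", "s3:PutObject", "s3:DeleteObject"],
            "Resource": "arn:aws:s3:::BUCKET-NAME/*"
        }
    ]
}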
TheVTM
  • 1,510
  • 13
  • 14
  • 2
    Seriously... I just tried out writing something from a lambda to a bucket. Had all the policies in place and the first resource was in (the one without .../*) Nothing worked. Could you explain why I need the /* one mandatorily? Seems kind of weird if I add a rule for the whole bucket, doesn't it? – Tim.G. Jun 11 '18 at 14:23
  • 35
    my god 3 hours wasted over a missing asterix – 0x6C38 Jun 29 '18 at 05:03
  • @Tim.G. It doesn't have the ability to add a GetObject rule for the whole bucket. You have to add a GetObject Rule on an object path. The /* is an object path. For GetObject, the "whole bucket" resource does absolutely nothing. Contrast that with something like GetBucket, which expects a bucket. – mdfst13 May 04 '20 at 10:35
57

Your Lambda's execution role does not have the required privileges (s3:GetObject).

Go to the IAM dashboard and check the role associated with your Lambda execution. If you used the AWS wizard, it automatically creates a role called oneClick_lambda_s3_exec_role. Click on Show Policy. It should show something similar to the attached image. Make sure s3:GetObject is listed.

(screenshot of the role's policy)
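
Since the question's function also deletes the original and uploads the transformed image (as garnaat notes in the comments), a minimal sketch of what the role policy would need - the source bucket name here is a placeholder, and this is not the wizard's actual output:

{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": ["s3:GetObject", "s3:DeleteObject"],
            "Resource": "arn:aws:s3:::SOURCE-BUCKET/*"
        },
        {
            "Effect": "Allow",
            "Action": ["s3:PutObject"],
            "Resource": "arn:aws:s3:::xmovo.transformedimages.develop/*"
        }
    ]
}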

helloV
  • 50,176
  • 7
  • 137
  • 145
  • The function is later attempting to DeleteObject and a PutObject so those actions would also have to be granted in the policy. – garnaat Feb 24 '16 at 01:18
  • in fact my lambda function has this policy, is identical to the one you described in your answer, but the truth is that even so keeps telling me Access Denied – cyberdantes Feb 24 '16 at 13:25
  • 4
    Then the best solution is to allow S3FullAccess, see if it works. If it does, then remove one set of access at a time from the policy and find the least privileges required for your Lambda to work. If it does not work even after giving S3FullAccess, then the problem is elsewhere. – helloV Feb 24 '16 at 15:20
  • 2
    And if you try to set ACL explicitly during a call to putObject you will also need to specifically allow the s3:PutObjectACL Action in the policy as well. – Octopus Oct 18 '16 at 22:28
  • 2
    @cyberdantes Did you resolve the issue? The role I'm using to execute lambda has full S3 Access which is {S3:*}. But I'm gettiing access denied from getObject function. – KMC Dec 12 '16 at 03:07
  • 9
    I've resolved the issue by appending "/*" to the Resource attribute, e.g. arn:aws:s3:::some-bucket-name/* – JFX Mar 23 '17 at 14:41
  • Really helped me a lot – Setu Kumar Basak Mar 26 '19 at 08:38
26

I ran into this issue and after hours of IAM policy madness, the solution was to:

  1. Go to S3 console
  2. Click bucket you are interested in.
  3. Click 'Properties'
  4. Unfold 'Permissions'
  5. Click 'Add more permissions'
  6. Choose 'Any Authenticated AWS User' from dropdown. Select 'Upload/Delete' and 'List' (or whatever you need for your lambda).
  7. Click 'Save'

Done. Your carefully written IAM role policies don't matter, neither do specific bucket policies (I've written those too to make it work). Or they just don't work on my account, who knows.

[EDIT]

After a lot of tinkering, I found that the above approach is not the best. Try this:

  1. Keep your role policy as in the helloV post.
  2. Go to S3. Select your bucket. Click Permissions. Click Bucket Policy.
  3. Try something like this:
{
    "Version": "2012-10-17",
    "Id": "Lambda access bucket policy",
    "Statement": [
        {
            "Sid": "All on objects in bucket lambda",
            "Effect": "Allow",
            "Principal": {
                "AWS": "arn:aws:iam::AWSACCOUNTID:root"
            },
            "Action": "s3:*",
            "Resource": "arn:aws:s3:::BUCKET-NAME/*"
        },
        {
            "Sid": "All on bucket by lambda",
            "Effect": "Allow",
            "Principal": {
                "AWS": "arn:aws:iam::AWSACCOUNTID:root"
            },
            "Action": "s3:*",
            "Resource": "arn:aws:s3:::BUCKET-NAME"
        }
    ]
}

Worked for me and does not require you to share the bucket with all authenticated AWS users (which most of the time is not ideal).

Adam Owczarczyk
  • 2,802
  • 1
  • 16
  • 21
  • Welp! I didn't know before that we need bucket policy to access S3 objects - despite the Lambda's IAM role having S3FullAccess. Thanks @adam-owczarczyk. Adding the bucket policy helped. – spacewanderer Nov 26 '21 at 03:51
12

If you have encryption set on your S3 bucket (such as AWS KMS), you may need to make sure the IAM role applied to your Lambda function is added to the list of IAM > Encryption keys > region > key > Key Users for the corresponding key that you used to encrypt your S3 bucket at rest.

In my screenshot, for example, I added the CyclopsApplicationLambdaRole role that I have applied to my Lambda function as a Key User in IAM for the same AWS KMS key that I used to encrypt my S3 bucket. Don't forget to select the correct region for your key when you open up the Encryption keys UI.

Find the execution role you've applied to your Lambda function: screenshot of Lambda execution role

Find the key you used to add encryption to your S3 bucket: screenshot of the key selected for the S3 bucket

In IAM > Encryption keys, choose your region and click on the key name: screenshot of region dropdown in IAM

Add the role as a Key User in IAM Encryption keys for the key specified in S3: screenshot of IAM key users selection
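
Adding the role under Key Users edits the key policy behind the scenes; as a rough sketch (account ID and role name are placeholders), the statement it adds looks roughly like this:

{
    "Sid": "Allow use of the key",
    "Effect": "Allow",
    "Principal": {
        "AWS": "arn:aws:iam::ACCOUNT-ID:role/YourLambdaExecutionRole"
    },
    "Action": [
        "kms:Encrypt",
        "kms:Decrypt",
        "kms:ReEncrypt*",
        "kms:GenerateDataKey*",
        "kms:DescribeKey"
    ],
    "Resource": "*"
}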

Danny Bullis
  • 3,043
  • 2
  • 29
  • 35
6

If all the other policy ducks are in a row, S3 will still return an Access Denied message if the object doesn't exist AND the requester doesn't have ListBucket permission on the bucket.

From https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectGET.html:

...If the object you request does not exist, the error Amazon S3 returns depends on whether you also have the s3:ListBucket permission.

If you have the s3:ListBucket permission on the bucket, Amazon S3 will return an HTTP status code 404 ("no such key") error. If you don't have the s3:ListBucket permission, Amazon S3 will return an HTTP status code 403 ("access denied") error.

pztrick
  • 3,741
  • 30
  • 35
Jeremiah
  • 81
  • 1
  • 3
4

I too ran into this issue. I fixed it by granting s3:GetObject* in the policy, as the call was attempting to obtain a version of that object.
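
A sketch of the corresponding statement in the role policy (the bucket name is a placeholder); s3:GetObjectVersion is the explicit action for fetching a specific version:

{
    "Effect": "Allow",
    "Action": [
        "s3:GetObject",
        "s3:GetObjectVersion"
    ],
    "Resource": "arn:aws:s3:::your-bucket-name/*"
}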

Steven Lu
  • 2,150
  • 1
  • 24
  • 33
2

I tried to execute a basic blueprint Python Lambda function [example code] and I had the same issue. My execution role was lambda_basic_execution.

I went to S3 > (my bucket name here) > Permissions.

(screenshot: S3 bucket policy view)

Because I'm a beginner, I used the Policy Generator provided by Amazon rather than writing the JSON myself: http://awspolicygen.s3.amazonaws.com/policygen.html. My JSON looks like this:

{
    "Id": "Policy153536723xxxx",
    "Version": "2012-10-17",
    "Statement": [
        {
            "Sid": "Stmt153536722xxxx",
            "Action": [
                "s3:GetObject"
            ],
            "Effect": "Allow",
            "Resource": "arn:aws:s3:::tokabucket/*",
            "Principal": {
                "AWS": [
                    "arn:aws:iam::82557712xxxx:role/lambda_basic_execution"
                ]
            }
        }
    ]
}

And then the code executed nicely:

(screenshot of the successful execution)

O-9
  • 1,626
  • 16
  • 15
2

I solved my problem by following all the instructions from AWS - How do I allow my Lambda execution role to access my Amazon S3 bucket?:

  1. Create an AWS Identity and Access Management (IAM) role for the Lambda function that grants access to the S3 bucket.

  2. Modify the IAM role's trust policy (a minimal sketch follows this list).

  3. Set the IAM role as the Lambda function's execution role.

  4. Verify that the bucket policy grants access to the Lambda function's execution role.
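
For step 2, the trust policy that lets the Lambda service assume the role is the standard one; a minimal sketch:

{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Principal": { "Service": "lambda.amazonaws.com" },
            "Action": "sts:AssumeRole"
        }
    ]
}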

dasilvadaniel
  • 413
  • 4
  • 8
  • 4. This is not necessary, As long as you gave one Allow statement that allows the IAM user or role to access the bucket, the operation will succeed – Glenn Bech Aug 26 '20 at 03:42
  • 4. is only necessary if you're accessing the bucket from another account. – Kieran Nov 03 '22 at 06:11
1

I was trying to read a file from S3 and create a new file by changing the content of the file read (Lambda + Node). Reading the file from S3 did not have any problem. As soon as I tried writing to the S3 bucket, I got an 'Access Denied' error.

I tried everything listed above but couldn't get rid of 'Access Denied'. Finally I was able to get it working by giving 'List Object' permission to everyone on my bucket. (screenshot: S3 Bucket Access Control List)

Obviously this is not the best approach, but nothing else worked.

imTheManager
  • 219
  • 3
  • 5
  • 2
    Explicitly define `"Action": "s3:ListBucket"` in the respective policy to avoid this. – taky2 Feb 05 '19 at 01:44
1

After searching for a long time, I saw that my bucket policy only allowed read access and not put access. Adding s3:Put* (as in the policy below) fixed it:

    "Version": "2012-10-17",
    "Statement": [
        {
            "Sid": "PublicListGet",
            "Effect": "Allow",
            "Principal": "*",
            "Action": [
                "s3:List*",
                "s3:Get*",
                "s3:Put*"
            ],
            "Resource": [
                "arn:aws:s3:::bucketName",
                "arn:aws:s3:::bucketName/*"
            ]
        }
    ]
}

Another issue might be that, in order to fetch objects cross-region, you need to initialize a new S3 client with the other region's name, like:

const getS3Client = (region) => new S3({ region })

I used this function to get an S3 client based on the region.
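
A usage sketch under the assumption that you first look up the bucket's region with getBucketLocation (which returns an empty LocationConstraint for us-east-1); the helper name here is hypothetical:

const AWS = require('aws-sdk');

const getS3Client = (region) => new AWS.S3({ region });

// Hypothetical helper: resolve the bucket's region, then read the object with a client for that region.
async function getObjectCrossRegion(bucket, key) {
    const location = await new AWS.S3().getBucketLocation({ Bucket: bucket }).promise();
    const region = location.LocationConstraint || 'us-east-1'; // empty string means us-east-1
    return getS3Client(region).getObject({ Bucket: bucket, Key: key }).promise();
}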

jazeb007
  • 578
  • 1
  • 5
  • 11
0

I was struggling with this issue for hours. I was using AmazonS3EncryptionClient and nothing I did helped. Then I noticed that the client is actually deprecated, so I thought I'd try switching to the builder model they have:

var builder = AmazonS3EncryptionClientBuilder.standard()
  .withEncryptionMaterials(new StaticEncryptionMaterialsProvider(encryptionMaterials))
if (accessKey.nonEmpty && secretKey.nonEmpty) builder = builder.withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKey.get, secretKey.get)))
builder.build()

And... that solved it. Looks like Lambda has trouble injecting the credentials in the old model, but works well in the new one.

yi1
  • 383
  • 4
  • 13
0

I was getting the same "AccessDenied: Access Denied" error while cropping S3 images using a Lambda function. I updated the S3 bucket policy and the IAM role's inline policy as per the document linked below.

But still, I was getting the same error. Then I realised I was trying to give "public-read" access in a private bucket. After removing ACL: 'public-read' from S3.putObject, the problem was resolved.

https://aws.amazon.com/premiumsupport/knowledge-center/access-denied-lambda-s3-bucket/
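
In other words, something like this (a minimal sketch; bucket, key and body are placeholders):

// Fails with AccessDenied when the bucket does not allow public ACLs:
// s3.putObject({ Bucket: 'my-private-bucket', Key: dstKey, Body: buffer, ACL: 'public-read' }, callback);

// Dropping the ACL lets the role's s3:PutObject permission suffice:
s3.putObject({
    Bucket: 'my-private-bucket',
    Key: dstKey,
    Body: buffer,
    ContentType: 'image/jpeg'
}, callback);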

Sanjeev Chauhan
  • 3,977
  • 3
  • 24
  • 30
0

I had this error message in the AWS Lambda environment when using boto3 with Python:

botocore.exceptions.ClientError: An error occurred (AccessDenied) when calling the GetObject operation: Access Denied

It turns out I needed an extra permission because I was using object tags. If your objects have tags, you will need both s3:GetObject AND s3:GetObjectTagging to get the object.
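
A sketch of the combined statement (bucket name is a placeholder):

{
    "Effect": "Allow",
    "Action": [
        "s3:GetObject",
        "s3:GetObjectTagging"
    ],
    "Resource": "arn:aws:s3:::your-bucket-name/*"
}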

0

I faced the same problem when creating a Lambda function that should read S3 bucket content. I created the Lambda function and the S3 bucket using AWS CDK. To solve this within AWS CDK, I used magic from the docs:

Resources that use execution roles, such as lambda.Function, also implement IGrantable, so you can grant them access directly instead of granting access to their role. For example, if bucket is an Amazon S3 bucket, and function is a Lambda function, the code below grants the function read access to the bucket.

bucket.grantRead(function);
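
For context, a minimal sketch of how that looks in a CDK stack (construct IDs, runtime and asset path are placeholders; assumes aws-cdk-lib v2):

const cdk = require('aws-cdk-lib');
const s3 = require('aws-cdk-lib/aws-s3');
const lambda = require('aws-cdk-lib/aws-lambda');

class ImageStack extends cdk.Stack {
    constructor(scope, id, props) {
        super(scope, id, props);

        const bucket = new s3.Bucket(this, 'SourceBucket');
        const fn = new lambda.Function(this, 'Handler', {
            runtime: lambda.Runtime.NODEJS_18_X,
            handler: 'index.handler',
            code: lambda.Code.fromAsset('lambda'),
        });

        // Grants the function read access (s3:GetObject*, s3:List*, ...) on the bucket
        // by adding the statements to its execution role.
        bucket.grantRead(fn);
    }
}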
Jaroslav Bezděk
  • 6,967
  • 6
  • 29
  • 46
0

Spent 3 days on this one. All the user roles and permissions don't matter if you specify credentials for a different IAM identity in your Lambda code. For example, in my code I have this:

const config = require('config.js')
AWS.config.update(config.aws_remote_config);

In the config file I have access keys for an IAM identity that DOES NOT have S3 access. Because of that, I am denied access to S3 even though I added all the permissions under "Execution role". I guess when you fire up the code, it overrules the permissions set before.
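
A sketch of the fix: inside Lambda, only set things like the region in AWS.config.update and leave the credentials out, so the SDK falls back to the execution role's temporary credentials:

var AWS = require('aws-sdk');

// Do NOT pass accessKeyId / secretAccessKey here inside Lambda;
// without them the SDK uses the function's execution role automatically.
AWS.config.update({ region: process.env.AWS_REGION });

var s3 = new AWS.S3();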

This should be documented better because I've been reading amazon articles for 3 days and no one mentioned this.

0ldMaid
  • 51
  • 3