I have configured SQS event notifications on an S3 bucket. When 21 files are uploaded to the bucket, the queue messages should end up starting 21 Step Functions executions, but only 14 executions are triggered and I am missing the remaining 7 events.
The moment I upload 21 files to the S3 bucket, the notification messages are pushed to SQS, which triggers the Lambda. I have set the Lambda's reserved concurrency to 5, so the first 5 Step Functions executions should start, and since 21 files were uploaded, a total of 21 executions should run in batches of 5. Instead, only 14 Step Functions executions run, one after another, rather than 21.
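For reference, this is roughly how the wiring described above could be set up with boto3; the bucket name, queue ARN, function name, and batch size below are placeholders (the real setup may have been done through the console or IaC, and the actual batch size is not stated in the question):

import boto3

s3 = boto3.client('s3')
lambda_client = boto3.client('lambda')

# S3 -> SQS: send a notification for every object created in the bucket
s3.put_bucket_notification_configuration(
    Bucket='my-upload-bucket',  # placeholder bucket name
    NotificationConfiguration={
        'QueueConfigurations': [{
            'QueueArn': 'arn:aws:sqs:us-east-1:123456789012:my-queue',  # placeholder
            'Events': ['s3:ObjectCreated:*']
        }]
    }
)

# SQS -> Lambda: event source mapping that drives the handler below
lambda_client.create_event_source_mapping(
    EventSourceArn='arn:aws:sqs:us-east-1:123456789012:my-queue',  # placeholder
    FunctionName='start-stepfunction',  # placeholder function name
    BatchSize=1  # placeholder; the actual batch size is not given in the question
)

# Reserved concurrency of 5, as described above
lambda_client.put_function_concurrency(
    FunctionName='start-stepfunction',
    ReservedConcurrentExecutions=5
)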
Lambda code:
import json
import boto3
import os
import uuid
import time

client = boto3.client('stepfunctions')
ipparam = {}
delay_seconds = 5
max_retry = 3


def lambda_handler(event, context):
    print('event', event)

    # Only the first SQS record in the batch is read
    record = event['Records'][0]
    body = record['body']
    s3EventBody = json.loads(body)
    s3EventBody = s3EventBody['Records']
    bucket = s3EventBody[0]['s3']['bucket']['name']
    key = s3EventBody[0]['s3']['object']['key']
    # bucket_name = event['detail']['requestParameters']['bucketName']
    # object_key = event['detail']['requestParameters']['key']
    ipparam["bucket"] = bucket
    ipparam["key"] = key
    print('bucket', bucket)
    print('key', key)

    transactionid = str(uuid.uuid4())

    # Start the state machine, retrying up to max_retry times on failure
    for retry_attempt in range(1, max_retry + 1):
        try:
            response = client.start_execution(
                stateMachineArn=os.environ['stepFunctionArn'],
                name=transactionid,
                input=json.dumps(ipparam)
            )
            print('response', response)
            executionArn = response['executionArn']
            break
        except Exception as e:
            print("Error invoking step function: ", e)
            if retry_attempt < max_retry:
                print('retrying in seconds: ', delay_seconds)
                time.sleep(delay_seconds)
            else:
                print('Max retries exceeded. Please check logs for further details')
                raise

    # Poll the execution status once a minute, for up to 14 checks
    for i in range(1, 15):
        response = client.describe_execution(
            executionArn=executionArn
        )
        execution_status = response['status']
        print('execution_status', execution_status)
        if execution_status in ('SUCCEEDED', 'FAILED'):
            return {
                'statusCode': 200,
                'body': execution_status
            }
        time.sleep(60)
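For reference, the handler above expects an SQS batch event where each record's body is the S3 notification JSON as a string. A trimmed example of that shape, built in Python (the messageId, bucket, and key values are illustrative placeholders):

import json

s3_notification = {
    "Records": [
        {
            "s3": {
                "bucket": {"name": "my-upload-bucket"},
                "object": {"key": "incoming/file-01.csv"}
            }
        }
    ]
}

sample_event = {
    "Records": [
        {
            "messageId": "00000000-0000-0000-0000-000000000000",
            "body": json.dumps(s3_notification)
        }
    ]
}

# Local smoke test (requires AWS credentials and the stepFunctionArn env var):
# lambda_handler(sample_event, None)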