
I am trying to upload a list of files to AWS S3 using the C++ SDK, but only a few files are uploaded and the program terminates before uploading all of them. I am using the upload API of the AWS S3 C++ SDK.

bool OnUpload_test(bool status, const char* carouselname, void* userdata)
{
        cout << "status = " << status << endl;
        cout << "Objectname = " << carouselname << endl;
        return true;
}

bool upload_thread(Proxy* proxy, const char* file, const char* path, int duration)
{
        char output_file[512];
        snprintf(output_file, 512, "%s/%s", path, file);
        proxy->Upload(file, output_file, duration, OnUpload_test, nullptr);
        return true;
}

int main(){

        char filename[9][128];
        int i = 0, num_thread = 10, j = 0, k = 0;

        thread *thread_count[num_thread];
        Proxy *m_proxy = new Proxy("123", "bucket_name", 20);

        //creating threads to test upload
        for (int i = 1, j = 0; i <= 9; i++, j++)
        {
                snprintf(filename[j], 128, "centos%d.mp4", i);
                cout << "count = " << j << endl;

                thread_count[i] = new thread(upload_thread, m_proxy, (const char*) filename[j], "path", 20);
        }
        for (i = 1; i <= 9; i++)
        {
                thread_count[i]->join();
        }
        sleep(120);
}

The Upload function calls the S3 API to upload the data. The files are about 40 MB each on average, and I am trying to upload 9 files.

////////////////////////////////////////////////////////////////////

bool UploadData::Uploaddata(string bucketName, string objname, string objdata,
                            int duration, const char* id)
{
        Aws::String bcktName(bucketName.c_str(), bucketName.size());
        Aws::String obj(objname.c_str(), objname.size());
        Aws::String objdt(objdata.c_str(), objdata.size());

        Aws::Client::ClientConfiguration config;
        config.connectTimeoutMs = 5000000;
        config.requestTimeoutMs = 6000000;

        const Aws::String user_region = "ap-south-1";
        config.region = user_region;
        Aws::S3::S3Client client(config);

        Aws::S3::Model::PutObjectRequest request;
        request.SetBucket(bcktName);
        request.SetKey(obj);

        const std::shared_ptr<Aws::IOStream> input_data =
                Aws::MakeShared<Aws::FStream>(obj.c_str(), objdt.c_str(),
                                              std::ios_base::in | std::ios_base::binary);

        input_data->seekg(0, input_data->end);
        int fsize = input_data->tellg();
        input_data->seekg(0, input_data->beg);

        request.SetContentLength(fsize);
        request.SetBody(input_data);

        auto outcome = client.PutObject(request);
        if (!outcome.IsSuccess()) {
                std::cout << "UploadObject error = " << outcome.GetError().GetMessage().c_str() << std::endl;
                return false;
        }
        return true;
}

1 Answer


Since you didn't post other relevant parts of the code, like the S3 client configuration, I can only guess. Maybe the timeout is not enough? Try increasing requestTimeoutMs and connectTimeoutMs in the client configuration.
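As a rough sketch (the values here are only placeholders to tune, not a recommendation), both timeouts live on Aws::Client::ClientConfiguration:

Aws::Client::ClientConfiguration config;
config.connectTimeoutMs = 30000;    // time allowed to establish the connection
config.requestTimeoutMs = 600000;   // time allowed for the whole request; large files need more
Aws::S3::S3Client client(config);   // region and credentials omitted for brevity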

Edit

Your code is not an MWE (for example, what is Proxy?). There is no initialization and shutdown of the AWS SDK.
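For reference, a minimal sketch of the SDK lifecycle that has to wrap all client usage (the full example below does the same):

#include <aws/core/Aws.h>

int main()
{
    Aws::SDKOptions options;
    Aws::InitAPI(options);      // must run before any S3Client is constructed
    {
        // create clients and run the uploads inside this scope,
        // so every client is destroyed before ShutdownAPI
    }
    Aws::ShutdownAPI(options);  // must run after all clients are gone
}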

This is what works for me using threads:

#include <iostream>
#include <fstream>
#include <thread>
#include <vector>
#include <unistd.h>
#include <aws/core/Aws.h>
#include <aws/core/auth/AWSCredentialsProvider.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/s3/S3Client.h>
#include <aws/s3/model/GetObjectRequest.h>
#include <aws/s3/model/PutObjectRequest.h>
#include <aws/s3/model/Bucket.h>

using namespace std;
using namespace Aws;

const Aws::String AWS_ACCESS_KEY_ID = "<your ACCESS KEY>";
const Aws::String AWS_SECRET_ACCESS_KEY = "<your SECRET ACCESS KEY>";

bool Uploaddata(string bucketName, string objname)
{
       Aws::String bcktName = Aws::String(bucketName.c_str());
       Aws::String obj = Aws::String(objname.c_str());

       Aws::Client::ClientConfiguration config;
       config.scheme = Aws::Http::Scheme::HTTPS;
       config.connectTimeoutMs = 5000000;
       config.requestTimeoutMs = 6000000;
       config.region = Aws::Region::EU_WEST_1;

       Aws::S3::S3Client client(Aws::Auth::AWSCredentials(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY), config);

       Aws::S3::Model::PutObjectRequest request;
       request.SetBucket(bcktName);
       request.SetKey(obj);

       // stream the local file; objname is used both as local path and as S3 key
       auto requestStream = Aws::MakeShared<Aws::FStream>("PutObjectInputStream", obj.c_str(), std::ios_base::in | std::ios_base::binary);
       request.SetBody(requestStream);

       auto outcome = client.PutObject(request);
       if (!outcome.IsSuccess()) {
               std::cout << "UploadObject error = " << outcome.GetError().GetMessage().c_str() << std::endl;
               return false;
       }
       return true;
}


int main(){

       char filename[9][128];
       std::string name_bucket = "<your bucket>";
       std::vector<std::thread> threads;

       Aws::SDKOptions options;
       Aws::InitAPI(options);
       {
           //creating threads to test the upload
           for (int j = 1; j <= 9; j++)
           {
                   snprintf(filename[j-1], 128, "centos%d.txt", j);   // j-1: valid indices are 0..8
                   cout << "count = " << j << endl;
                   threads.push_back(std::thread(Uploaddata, name_bucket, filename[j-1]));
           }
           for (auto& th : threads) th.join();
           sleep(120);
       }
       Aws::ShutdownAPI(options);
}
rok
  • @rok As you suggested, I have increased requestTimeoutMs and connectTimeoutMs, but I still have the same issue. – Mohsin Khan Nov 27 '20 at 07:41
  • How much did you increase them? Did the number of uploaded files increase? – rok Nov 27 '20 at 08:36
  • @rok Earlier it was config.connectTimeoutMs = 50000; config.requestTimeoutMs = 600000; now it is config.connectTimeoutMs = 5000000; config.requestTimeoutMs = 6000000. No, still only 2 files are being uploaded. – Mohsin Khan Nov 27 '20 at 09:21
  • @rok No, it terminates normally after the sleep. – Mohsin Khan Nov 28 '20 at 13:58
  • I have tried your code but I am getting errors. Actually, after uploading the objects I apply a lifecycle rule on them and I get the following error: ERROR: OperationAborted: Unable to parse ExceptionName: OperationAborted Message: A conflicting conditional operation is currently in progress against this resource. Please try again. – Mohsin Khan Nov 30 '20 at 10:19
  • That is another question; you should open a new question about the lifecycle rule, posting exactly what you are doing. Does the upload with my code work? If it doesn't, what errors do you get? – rok Nov 30 '20 at 10:45
  • @rok Actually, still only a few video files are uploaded, not all of them. I have opened a new question about the lifecycle rule. – Mohsin Khan Nov 30 '20 at 10:56
  • I just tried my code with 9 files of ~50 MB and everything works, so your problem is not the code; maybe it's the lifecycle rule? Maybe the upload is successful but the rule somehow deletes some of the files? If something goes wrong with the upload, you must receive an error from AWS. – rok Nov 30 '20 at 12:49