
I'm trying to upload large video files to my OneDrive storage using Node.js. Currently I can upload files smaller than 60 MB with ease. The problem arises when I want to upload files larger than that. I get the following error:

"error": {
    "code": "invalidRange",
    "message": "The uploaded fragment is not contiguous with the last one.",
    "innererror": {
        "code": "fragmentOutOfOrder"
    } 
}
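From what I understand, each fragment's Content-Range must start exactly where the previous one ended. For example, a 100-byte file uploaded in 40-byte chunks should send, in order:

Content-Range: bytes 0-39/100
Content-Range: bytes 40-79/100
Content-Range: bytes 80-99/100

So it seems my chunks are not being sent in that order.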

This is the code I've used:

var fs = require('fs');
var request = require('request');
var async = require('async');

var client_id = "39bc#####################91";
var redirect_uri = "https://script.google.com/macros/s/#######/usercallback";
var client_secret = "xu##################{";
var refresh_token = "MCSIUo########################w$$";
var file = "videoplayback.mp4"; // Filename you want to upload.
var onedrive_folder = 'uploads'; // Folder on OneDrive
var onedrive_filename = file; // If you want to change the filename on OneDrive, please set this.

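// Step 1: use the refresh token to get an access token, then create an upload session for the file.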
function resUpload(){
    request.post({
        url: 'https://login.microsoftonline.com/common/oauth2/v2.0/token',
        form: {
            client_id: client_id,
            redirect_uri: redirect_uri,
            client_secret: client_secret,
            grant_type: "refresh_token",
            refresh_token: refresh_token,
        },
    }, function(error, response, body) { // Here, it creates the session.
        request.post({
            url: 'https://graph.microsoft.com/v1.0/drive/root:/' + onedrive_folder + '/' + onedrive_filename + ':/createUploadSession',
            headers: {
                'Authorization': "Bearer " + JSON.parse(body).access_token,
                'Content-Type': "application/json",
            },
            body: '{"item": {"@microsoft.graph.conflictBehavior": "rename", "name": "' + onedrive_filename +'"}}',
        }, function(er, re, bo) {
            uploadFile(JSON.parse(bo).uploadUrl);
        });
    });
}

function uploadFile(uploadUrl) { // Here, it uploads the file chunk by chunk.
    async.eachSeries(getparams(), function(st, callback){
        setTimeout(function() {
            fs.readFile(file, function read(e, f) {
                request.put({
                    url: uploadUrl,
                    headers: {
                        'Content-Length': st.clen,
                        'Content-Range': st.cr,
                    },
                    body: f.slice(st.bstart, st.bend + 1),
                }, function(er, re, bo) {
                    console.log(bo);
                });
            });
            console.log(st.bstart);
            callback();
        }, st.stime);
    });
}

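// Splits the file into chunks (up to ~60 MB each) and precomputes the byte offsets, Content-Range header, and delay for each chunk.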
function getparams(){
    var allsize = fs.statSync(file).size;
    var sep = allsize < (60 * 1024 * 1024) ? allsize : (60 * 1024 * 1024) - 1;
    var ar = [];
    for (var i = 0; i < allsize; i += sep) {
        var bstart = i;
        var bend = i + sep - 1 < allsize ? i + sep - 1 : allsize - 1;
        var cr = 'bytes ' + bstart + '-' + bend + '/' + allsize;
        var clen = bend != allsize - 1 ? sep : allsize - i;
        var stime = allsize < (60 * 1024 * 1024) ? 10000 : 20000;
        ar.push({
            bstart : bstart,
            bend : bend,
            cr : cr,
            clen : clen,
            stime: stime,
        }); 
    }
    return ar;
}

resUpload();

Also, is this the right way to do it? If not, what is the best approach/code for uploading large files?

Tân
  • We'd have to see the requests being sent to confirm, but it sounds like the second chunk is being sent before the first has completed. Perhaps this question is relevant? https://stackoverflow.com/questions/23864052/async-eachseries-in-node-js – Brad Nov 12 '18 at 18:18
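To illustrate what Brad describes, below is a minimal, untested sketch of uploadFile that only calls callback() after the PUT for the current chunk has completed, so the next fragment cannot be sent early (it reuses the file, getparams, request, and async variables from the question):

function uploadFile(uploadUrl) {
    async.eachSeries(getparams(), function(st, callback) {
        fs.readFile(file, function(e, f) {
            if (e) return callback(e);
            request.put({
                url: uploadUrl,
                headers: {
                    'Content-Length': st.clen,
                    'Content-Range': st.cr,
                },
                body: f.slice(st.bstart, st.bend + 1),
            }, function(er, re, bo) {
                console.log(bo);
                callback(er); // advance to the next chunk only once this PUT has finished
            });
        });
    }, function(err) {
        if (err) console.error('Upload failed:', err);
    });
}

With the requests serialized this way, the setTimeout delay is no longer needed. Note also that the Graph documentation recommends fragment sizes that are a multiple of 320 KiB (327,680 bytes), which (60 * 1024 * 1024) - 1 is not, so adjusting sep to a multiple of 327680 may help as well.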

0 Answers