
I'm trying to upload a Revit file to my OSS bucket chunk by chunk. The Revit file is almost 13 MB. Here is my code:

function handleFileSelect(evt) {
    var files = evt.target.files; 
    var file = files[0];

    var segmentSize = 1024 * 1024 * 5; //5 MB
    var startingByte = 0;
    var endingByte = startingByte + segmentSize - 1;
    var segments = Math.ceil(file.size / segmentSize);
    var session = Math.floor(100000000 + Math.random() * -900000000);
    

    for (var i = 0; i < segments; i ++)
    {
        var blob = file.slice(startingByte, endingByte);
        var url = 'https://developer.api.autodesk.com/oss/v2/buckets/' + 'linked_model' + '/objects/' + file.name + '/resumable';
        //console.log(url);
        var contentRange = 'bytes ' + startingByte + '-' + endingByte + '/' + file.size;

        $.ajax({
            type: 'PUT',
            url: url,
            data: blob,
            headers: {
                'Authorization':'Bearer ' + token,
                'Content-Type':'application/octet-stream',
                'Content-Range': contentRange,
                'Session-Id': session
            },
            crossDomain: true,
            processData: false,
            success: function (data) {
                console.log(i);
                startingByte = endingByte + 1;
                endingByte = startingByte + segmentSize - 1;
                },
            error: function (XMLHttpRequest, textStatus, errorThrown) {
                alert("Status: " + textStatus); alert("Error: " + errorThrown);
                console.log(startingByte);
                console.log(endingByte);
                console.log(file.size);
            }
        });
    }
}

It gives me the error: 416 (Requested Range Not Satisfiable)

Can anyone help?

  • This method makes a bunch of async calls almost at the same time. I could imagine that you need to call them one after the other. – Tobias Weibel Mar 08 '17 at 13:11
  • 13 MB? Why are you chunking it? You can upload it all at once. – Jaime Rosales Mar 08 '17 at 20:22
  • It is just a sample file instead of sending 1 GB. All I need to know is why this is not working. – mustafa.salaheldin Mar 09 '17 at 12:57
  • As @TobiasWeibel says, it makes sense why it's failing. Try running them synchronously; since the file is that small and the service is fast enough, your async calls are probably being fired at strange times. Try the same code with a larger file, just for science, I guess. – Jaime Rosales Mar 09 '17 at 22:01
  • I'm hitting the same error as you do and your question is perfectly legit. I will do my best to sort it out the upcoming week and update this thread with my findings. Cheers! – Felipe Jun 03 '17 at 20:52
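
Following the comments above, here is a minimal sketch of what serializing the uploads could look like, using async/await and fetch instead of a loop of parallel $.ajax calls. It reuses the question's bucket key ('linked_model') and assumes a valid token is in scope; the session id scheme is illustrative. Note two fixes over the original: File.slice's second argument is exclusive, and the last chunk's end byte is clamped to the file size.

async function uploadSequentially(file, token) {
    var chunkSize = 1024 * 1024 * 5; // 5 MB
    var nbChunks = Math.ceil(file.size / chunkSize);
    var sessionId = 'si-' + Date.now(); // illustrative session id

    var url = 'https://developer.api.autodesk.com/oss/v2/buckets/' +
        'linked_model' + '/objects/' + file.name + '/resumable';

    for (var i = 0; i < nbChunks; i++) {
        var start = i * chunkSize;
        var end = Math.min(file.size, (i + 1) * chunkSize) - 1;

        // File.slice's second argument is exclusive, hence end + 1
        var blob = file.slice(start, end + 1);

        // each chunk is awaited before the next one is sent
        var res = await fetch(url, {
            method: 'PUT',
            body: blob,
            headers: {
                'Authorization': 'Bearer ' + token,
                'Content-Type': 'application/octet-stream',
                'Content-Range': 'bytes ' + start + '-' + end + '/' + file.size,
                'Session-Id': sessionId
            }
        });

        if (!res.ok) {
            throw new Error('Chunk ' + i + ' failed with status ' + res.status);
        }
    }
}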

1 Answer


I had the same 416 error but my issue was that I tried to upload chunks smaller than 2MB, which is not doable (except for the last chunk).

When I increased the chunk size to 5 MB it started to work. I just wrote a blog article about it: https://forge.autodesk.com/blog/nailing-large-files-uploads-forge-resumable-api
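
To make the constraint concrete, here is what the Content-Range headers work out to for a roughly 13 MB file (as in the question) split into 5 MB chunks; only the last chunk is allowed to fall below the 2 MB minimum:

var fileSize = 13 * 1024 * 1024;  // 13,631,488 bytes
var chunkSize = 5 * 1024 * 1024;  // 5 MB
var nbChunks = Math.ceil(fileSize / chunkSize);

for (var i = 0; i < nbChunks; i++) {
    var start = i * chunkSize;
    var end = Math.min(fileSize, (i + 1) * chunkSize) - 1;
    console.log('Content-Range: bytes ' + start + '-' + end + '/' + fileSize);
}
// Content-Range: bytes 0-5242879/13631488
// Content-Range: bytes 5242880-10485759/13631488
// Content-Range: bytes 10485760-13631487/13631488  (last chunk, ~3 MB)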

Below is the core piece of code that handles the chunking and uploading (in Node.js).

By the way, I strongly discourage you from performing this kind of operation client-side as your snippet does: it means passing a write-access token to the web page, which compromises the security of your app. You should first upload the file to your server and then securely upload it from there to Forge, as described in the post and my sample.

// Assumes the following requires, not shown in this excerpt:
// const fs = require('fs')
// const { eachLimit } = require('async')

/////////////////////////////////////////////////////////
// Uploads object to bucket using resumable endpoint
//
/////////////////////////////////////////////////////////
uploadObjectChunked (getToken, bucketKey, objectKey,
                     file, opts = {}) {

  return new Promise((resolve, reject) => {

    const chunkSize = opts.chunkSize || 5 * 1024 * 1024

    const nbChunks = Math.ceil(file.size / chunkSize)

    const chunksMap = Array.from({
      length: nbChunks
    }, (e, i) => i)

    // generates a unique session ID
    const sessionId = this.guid()

    // prepare the upload tasks
    const uploadTasks = chunksMap.map((chunkIdx) => {

      const start = chunkIdx * chunkSize

      const end = Math.min(
          file.size, (chunkIdx + 1) * chunkSize) - 1

      const range = `bytes ${start}-${end}/${file.size}`

      const length = end - start + 1

      const readStream =
        fs.createReadStream(file.path, {
          start, end
        })

      const run = async () => {

        const token = await getToken()

        return this._objectsAPI.uploadChunk(
          bucketKey, objectKey,
          length, range, sessionId,
          readStream, {},
          {autoRefresh: false}, token)
      }

      return {
        chunkIndex: chunkIdx,
        run
      }
    })

    let progress = 0

    // runs the upload tasks in parallel; the number of
    // simultaneous uploads is defined by
    // opts.concurrentUploads
    eachLimit(uploadTasks, opts.concurrentUploads || 3,
      (task, callback) => {

        task.run().then((res) => {

          if (opts.onProgress) {

            progress += 100.0 / nbChunks

            opts.onProgress ({
              progress: Math.round(progress * 100) / 100,
              chunkIndex: task.chunkIndex
            })
          }

          callback ()

        }, (err) => {

          console.log('error')
          console.log(err)

          callback(err)
        })

    }, (err) => {

        if (err) {

          return reject(err)
        }

        return resolve({
          fileSize: file.size,
          bucketKey,
          objectKey,
          nbChunks
        })
    })
  })
}
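
For reference, a hypothetical call site could look like the following. The bucket key, object key, file path, and the body of getToken are all placeholders, not part of the sample above; svc stands in for whatever instance holds the method.

// getToken must resolve to a token with write scope; how you
// obtain and cache it depends on your own auth wrapper
const getToken = async () => {
  return myForgeAuth.getReadWriteToken() // placeholder
}

// the method expects an object exposing path and size
const file = {
  path: './uploads/model.rvt', // placeholder path
  size: fs.statSync('./uploads/model.rvt').size
}

svc.uploadObjectChunked(getToken, 'my-bucket-key', 'model.rvt', file, {
  chunkSize: 5 * 1024 * 1024,
  concurrentUploads: 3,
  onProgress: ({progress, chunkIndex}) => {
    console.log(`chunk ${chunkIndex} uploaded - ${progress}%`)
  }
}).then((res) => {
  console.log('Upload complete:', res)
}, (err) => {
  console.error(err)
})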
Felipe