2

I'm developing a wrapper on top of Guacamole. I've made my own backend websocket that connects the Guacamole javascript API to the guacd daemon. Now working on file transfers. Problem I have is via SFTP. I managed to successfully download a file via SFTP regardless of the file size (well, tried 500MB max, i assume larger would work). My problem is with the upload. If i upload a file of a few bytes, it's going to work (81 bytes, to be precise, works. 82 bytes doesn't work anymore).

Here is my upload function:

function UploadFileSFTP(path, filesystem, file, onprogress) {
  console.log('Uploading file ' + file.name + ' to ' + path);
  console.log('file type: ' + file.type);

  let stream = filesystem.createOutputStream(file.type, path);

  stream.onack = function beginUpload(status) {
    if (status.isError()) {
      console.log('Guacamole error on stream');
      console.log(status);
      return;
    }

    var blob_writer = new Guacamole.BlobWriter(stream, file.type);
    blob_writer.oncomplete = function uploadComplete() {
      console.log('upload complete');
      blob_writer.sendEnd();
    };

    blob_writer.onack = function packagereceived(status) {
      console.log('ack status: ');
      console.log(status);
    };

    blob_writer.onerror = function uploadError() {
      console.log('upload error');
    };

    if (file.size > 0) {
      console.log('uploading file of size: ' + file.size);
      var slice_size = 1024;
      var sent = 0;

      while (sent < file.size) {

        //get slices of slice_size, or what's left of the last chunk
        
        //get a slice of the file
        var data = file.slice(sent, sent + slice_size);
        blob_writer.sendBlob(data);

        //increment sent bytes
        sent += tosend;
        console.log(sent);
      }
    } else {
      console.log('error');
    }
  }
}

Now, the "path" argument is correct, since it does work with a few bytes worth of text file. Filesystem object is the one i get from the Guacamole client on the "onfilesystem" callback. "file" parameter is basically the myform.files[0] object. "onprogress" parameter is just a delegate, not used at the moment.

When sending a file of a few bytes, the blob gets sent (and yes, if i make the slice_size lower, for example, just 4, it will send my small file in chunks correctly). Right after all the "blob_writer.sendblob(data)" calls are made, i do receive the blob_writer.onack() callback from the stream, and right after the blobk_writer.oncomplete().

Problem is when i send a larger file (well, slightly larger, basically a file of 512 bytes for example). The "blob_writer.sendblob(data)" executes, all of them. But i get no "onack" back and no "oncomplete". On the SSH machine, the file does get created at the target location, but there are 0 bytes written to it.

Darksody
  • 481
  • 1
  • 4
  • 15
  • Update: after correcting the file.slice() parameters, i manage to send a larger text file (about 1200 bytes), but using a slice_size of 64. That of course is too low, but i did manage to send a slightly larger file. If i try sending a 4MB file, it fails with no error (actually the stream crashes i believe, since my connection just freezes) – Darksody Jun 28 '23 at 17:17

1 Answers1

0

Managed to get it working, and quite at a reasonable speed. One of the problems was the backend, the websocket was failing at some point.

Here is the working Javascript code to upload a file (onprogress callback not used yet)

function UploadFileSFTP(path, filesystem, file, onprogress) {
  console.log('Uploading file ' + file.name + ' to ' + path);
  console.log('file type: ' + file.type);
  console.log('file size: ' + file.size);

  let stream = filesystem.createOutputStream(file.type, path);

  stream.onack = function beginUpload(status) {
    if (status.isError()) {
      console.log('Guacamole error on stream');
      console.log(status);
      return;
    }

    var offset = 0;
    var chunkSize = 1024 * 1024; // 1MB

    function uploadNextChunk() {
      var reader = new FileReader();
      var chunk = file.slice(offset, offset + chunkSize);

      reader.onload = function (event) {
        // Get the chunk content
        var chunkContent = event.target.result;
        console.log(chunkContent.byteLength);
        // Convert the chunk content to a Blob
        var chunkBlob = new Blob([chunkContent]);

        // Write the chunk to the output stream
        var writer = new Guacamole.BlobWriter(stream);
        writer.oncomplete = function () {
          offset += chunkSize;

          // Check if there are more chunks to upload
          if (offset < file.size) {
            // Upload the next chunk
            uploadNextChunk();
          } else {
            // File upload complete
            console.log('File uploaded successfully');
          }
        };
        writer.sendBlob(chunkBlob);
      };

      reader.readAsArrayBuffer(chunk);
    }

    // Start uploading chunks
    uploadNextChunk();
  }
}
Darksody
  • 481
  • 1
  • 4
  • 15
  • When I try to send the file, it throws the file in the range of 700-900 kb. I changed the chunkSize to 10 mb, but it still sends the file slowly. Is there any way to speed this up? I also tried the code below, it sends the file, but for example, when there is a 1 GB file, the SSH or RDP screen hangs, but when I wait 1-2 hours (this time is directly proportional to the size of the file), it sends the file. – Ekrem Aug 22 '23 at 12:54
  • var file = fileInput.files[0]; var reader = new FileReader(); reader.onloadend = function fileContentsLoaded(e) { const stream = guac.createFileStream(file.type, file.name); var bufferWriter = new Guacamole.ArrayBufferWriter(stream); bufferWriter.sendData(reader.result); bufferWriter.sendEnd(); }; – Ekrem Aug 22 '23 at 12:54
  • Sadly I still have the same problem with the speed. I use chunks of 1 MB, it sends files successfully (largest I tried was about 300 MB), but it's really slow. Don't know why though... – Darksody Aug 25 '23 at 04:13