
I've recently been trying to deploy my Angular site to Azure Static Website hosting, but I've run into some problems.

As far as I can tell, running ng build and uploading all the files to Blob storage works fine, but if I compress my files to save space, things go wrong.

I compress my files using gzip under Linux, then rename them to strip the .gz suffix at the end, so each file keeps its original name.
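Roughly, that compress-and-rename step looks like this (a minimal Node equivalent of what I do with gzip under Linux, assuming everything in ./dist/ is a regular file; zlib is built into Node):

const fs = require("fs");
const zlib = require("zlib");

const dist = "./dist/";
//Gzip every file in place, keeping its original name
//(roughly `gzip file && mv file.gz file` under Linux)
for (const file of fs.readdirSync(dist)) {
  const path = dist + file;
  if (fs.lstatSync(path).isFile()) {
    fs.writeFileSync(path, zlib.gzipSync(fs.readFileSync(path)));
  }
}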

Since all the files are gzip-encoded, I use the @azure/storage-blob package to upload them. The JS script looks like this:

const fs = require("fs");

const {BlobServiceClient, StorageSharedKeyCredential} = require("@azure/storage-blob");
//Give every file type a content type
const contentTps = {
  "js": "text/javascript",
  "txt": "text/plain",
  "png": "image/png",
  "css": "text/css",
  "json": "application/json",
  "gif": "image/gif",
  "ico": "image/x-icon",
  "svg": "image/svg+xml",
  "woff": "application/font-woff",
  "html": "text/html"
};
//The account name and key should probably be read from configuration rather than hard-coded
const account = "accountName";
const accountKey = "accountKey";
const containerName = "$web";
const fileLocation = "./dist/";
// Use StorageSharedKeyCredential with storage account and account key
// StorageSharedKeyCredential is only available in the Node.js runtime, not in browsers
let sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey);
let blobServiceClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  sharedKeyCredential
);

//This main function could be adapted to other situations
async function main() {

  //Get the container client by container name
  let container = blobServiceClient.getContainerClient(containerName);

  //Delete all blobs in the container so everything can be uploaded fresh
  for await (let blob of container.listBlobsFlat()) {
    await container.deleteBlob(blob.name);
  }
  for (const file of fs.readdirSync(fileLocation)) {
    console.log("file is " + file);
    let stat = fs.lstatSync(fileLocation + "/" + file);
    if (stat.isFile()) {
      let txt = fs.readFileSync(fileLocation + "/" + file);
      let blobName = file;
      let index = blobName.lastIndexOf(".");
      let ext = blobName.substr(index + 1);
      console.log("The file ends with: " + ext);

      console.log("Uploading block blob " + blobName);
      //Content type must be set on upload; otherwise every blob becomes
      //application/octet-stream and cannot be opened in the browser
      await container.uploadBlockBlob(blobName, txt, txt.length, {
        blobHTTPHeaders: {
          blobContentType: contentTps[ext],
          //omit blobContentEncoding when testing an uncompressed local deploy
          blobContentEncoding: "gzip",
        }
      });

      //const uploadResponse = blockBlobClient.uploadFile(fileLocation + "/" + file);
      console.log("Blob was uploaded");
    }
  }

}

main();

As you can see, I've also changed the content type to the corresponding one for each file, instead of the default application/octet-stream.

The upload itself still works fine, but the files served online just come out as scrambled text...
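To see what's actually coming back over the wire, the response headers can be checked with a quick sketch like this (the site URL here is a placeholder):

const https = require("https");
//Print the headers the static website endpoint serves for one file
https.get("https://accountName.z13.web.core.windows.net/index.html", res => {
  console.log(res.headers["content-type"], res.headers["content-encoding"]);
});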

  • I can see that you're setting content-encoding as gzip, but can you check that again (just as a sanity check)? – Gaurav Mantri Jan 31 '20 at 14:54
  • @GauravMantri I think the logic is correct; files uploaded this way can all be read back without garbling (except font files, which aren't meant to be human-readable anyway). It's just weird that I can't open them. – Tim Lyi Jan 31 '20 at 16:36
  • Can you share the URL of either your website or one of the files? – Gaurav Mantri Jan 31 '20 at 17:09

1 Answer


You need to compress the content first. Here's an example:

  const zlib = require("zlib");

  const content = "Hello world hello world Hello world hello world Hello world hello world";
  //Compress the content in memory, then upload it with a matching Content-Encoding header
  const gzipped = zlib.gzipSync(content);
  const res = await blockBlobClient.upload(gzipped, gzipped.byteLength, {
    blobHTTPHeaders: {
        blobContentEncoding: "gzip",
        blobContentType: "text/plain; charset=UTF-8"
    }
  });
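Applied to the upload loop from the question, that means gzipping each file's bytes in memory right before uploading. A sketch (inside the async main() from the question, assuming ./dist/ holds the uncompressed ng build output, and reusing the container client and contentTps map defined there):

  const zlib = require("zlib");

  for (const file of fs.readdirSync(fileLocation)) {
    const path = fileLocation + "/" + file;
    if (fs.lstatSync(path).isFile()) {
      const ext = file.substr(file.lastIndexOf(".") + 1);
      //Compress the raw bytes in memory, then upload with matching headers
      const gzipped = zlib.gzipSync(fs.readFileSync(path));
      await container.uploadBlockBlob(file, gzipped, gzipped.byteLength, {
        blobHTTPHeaders: {
          blobContentType: contentTps[ext],
          blobContentEncoding: "gzip"
        }
      });
    }
  }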
– Jeremy Meng