I've recently been trying to deploy my Angular site to Azure Static Website hosting, but I've run into some problems.
As far as I can tell, if I run ng build and upload all the files to Blob storage as-is, the site works well; but if I compress my files to save space, things go wrong.
I compress my files with gzip under Linux, then rename them to clip the trailing .gz off the end.
Since all the files are gzip-encoded, I use the @azure/storage-blob package to upload them; my JS script looks like this:
const fs = require("fs");
const {BlobServiceClient, StorageSharedKeyCredential} = require("@azure/storage-blob");
// Lookup table mapping a file extension to the Content-Type header value
// that should be set on the uploaded blob.
const contentTps = {
  css: "text/css",
  gif: "image/gif",
  html: "text/html",
  ico: "image/x-icon",
  js: "text/javascript",
  json: "application/json",
  png: "image/png",
  svg: "image/svg+xml",
  txt: "text/plain",
  woff: "application/font-woff",
};
// NOTE(review): the account key is hard-coded here; ideally it should be
// read from an environment variable or a key vault instead.
const account = "accountName";
const accountKey = "accountKey";
const containerName = "$web";
const fileLocation = "./dist/";

// Authenticate with account name + key. StorageSharedKeyCredential is
// only available in the Node.js runtime, not in browsers.
const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey);
const blobServiceClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  sharedKeyCredential
);
/**
 * Map a file name to the Content-Type it should be served with.
 *
 * Gzip-compressed files may keep a trailing ".gz" (e.g. "main.js.gz");
 * the content type must describe the *inner* file, so a trailing ".gz"
 * is stripped before the extension lookup.
 *
 * @param {string} fileName - blob/file name, possibly ending in ".gz"
 * @param {Object<string,string>} typeMap - extension -> content type map
 * @returns {string} matching content type, or "application/octet-stream"
 *   when the extension is unknown or missing
 */
function resolveContentType(fileName, typeMap) {
  const inner = fileName.endsWith(".gz") ? fileName.slice(0, -3) : fileName;
  const dot = inner.lastIndexOf(".");
  if (dot === -1) {
    return "application/octet-stream";
  }
  return typeMap[inner.slice(dot + 1)] || "application/octet-stream";
}

/**
 * Wipe the target container and re-upload every file in `fileLocation`,
 * setting Content-Type per extension and Content-Encoding: gzip.
 *
 * Fixes vs. the original:
 * - uploads are awaited (the original fired uploadBlockBlob inside a
 *   forEach callback without awaiting, so main() resolved before the
 *   uploads finished and any failure was silently dropped);
 * - content-type lookup handles names with a trailing ".gz" and falls
 *   back to application/octet-stream instead of undefined.
 */
async function main() {
  // Look the container up by name.
  const container = blobServiceClient.getContainerClient(containerName);

  // Delete all existing blobs so this deployment fully replaces the old one.
  for await (const blob of container.listBlobsFlat()) {
    await container.deleteBlob(blob.name);
  }

  for (const file of fs.readdirSync(fileLocation)) {
    console.log("file is " + file);
    const path = fileLocation + "/" + file;
    if (!fs.lstatSync(path).isFile()) {
      continue; // skip sub-directories
    }
    const content = fs.readFileSync(path);
    const contentType = resolveContentType(file, contentTps);
    console.log("Uploading block blob" + file);
    // Without the correct blobContentType everything is served as
    // application/octet-stream; without blobContentEncoding the browser
    // never gunzips the body and renders scrambled bytes.
    await container.uploadBlockBlob(file, content, content.length, {
      blobHTTPHeaders: {
        blobContentType: contentType,
        blobContentEncoding: "gzip",
      },
    });
    console.log("Blob was uploaded ");
  }
}
// Surface failures instead of leaving an unhandled promise rejection:
// the original `main();` floated the promise, so any upload/delete error
// was only reported as an unhandled-rejection warning.
main().catch((err) => {
  console.error("Deployment failed:", err);
  process.exitCode = 1;
});
As you can see, I've also changed the Content-Type to the one that corresponds to each file, instead of the default application/octet-stream.
The upload operation still works fine, but the files served online just appear as scrambled text...