Solution 1:
Inject a new SAS into an ongoing upload. Sometimes a SAS token will expire before a large upload finishes; in that scenario, you want to request a new SAS token during the upload instead of starting a new upload.
Try the following code to update the SAS while uploading a large file.
async function upload() {
  const sasStore = new SasStore();

  const pipeline = Azure.StorageURL.newPipeline(
    new Azure.AnonymousCredential()
  );
  // Inject SAS update policy factory into current pipeline
  pipeline.factories.unshift(new SasUpdatePolicyFactory(sasStore));

  const url = "https://jsv10.blob.core.windows.net/mycontainer/myblob";
  const blockBlobURL = new Azure.BlockBlobURL(
    `${url}${await sasStore.getValidSASForBlob(url)}`, // A SAS should start with "?"
    pipeline
  );

  const file = document.getElementById("file").files[0];
  await Azure.uploadBrowserDataToBlockBlob(
    Azure.Aborter.none,
    file,
    blockBlobURL,
    {
      maxSingleShotSize: 4 * 1024 * 1024
    }
  );
}
// azblob is the default exported entry when importing the Azure Storage Blob SDK with <script src="azure-storage.blob.js"></script>
// You can also import the SDK from npm with const Azure = require("@azure/storage-blob")
const Azure = azblob;
// SasStore is a class to cache SAS for blobs
class SasStore {
  constructor() {
    this.sasCache = {};
  }

  // Get a valid SAS for the blob, requesting a new one if the cached SAS is about to expire
  async getValidSASForBlob(blobURL) {
    if (
      this.sasCache[blobURL] &&
      this.isSasStillValidInNext2Mins(this.sasCache[blobURL])
    ) {
      return this.sasCache[blobURL];
    } else {
      return (this.sasCache[blobURL] = await this.getNewSasForBlob(blobURL));
    }
  }

  // Return true if the "se" (signed expiry) parameter in the SAS is still valid for the next 2 minutes
  isSasStillValidInNext2Mins(sas) {
    const expiryStringInSas = new URL(`http://host${sas}`).searchParams.get("se");
    return new Date(expiryStringInSas) - new Date() >= 2 * 60 * 1000;
  }

  // Get a new SAS for the blob; a SAS is assumed to start with "?"
  async getNewSasForBlob(blobURL) {
    // TODO: You need to implement this, e.g. by requesting a SAS from your backend (see the sketch below)
    return "?newSAS";
  }
}
class SasUpdatePolicyFactory {
  constructor(sasStore) {
    this.sasStore = sasStore;
  }

  create(nextPolicy, options) {
    return new SasUpdatePolicy(nextPolicy, options, this.sasStore);
  }
}

class SasUpdatePolicy extends Azure.BaseRequestPolicy {
  constructor(nextPolicy, options, sasStore) {
    super(nextPolicy, options);
    this.sasStore = sasStore;
  }

  async sendRequest(request) {
    const urlObj = new URL(request.url);
    const sas = await this.sasStore.getValidSASForBlob(
      `${urlObj.origin}${urlObj.pathname}`
    );
    new URL(`http://hostname${sas}`).searchParams.forEach((value, key) => {
      urlObj.searchParams.set(key, value);
    });
    // Update request URL with latest SAS
    request.url = urlObj.toString();
    return this._nextPolicy.sendRequest(request);
  }
}
For more details, follow this GitHub code. To create a new SAS, refer to this thread.
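The getNewSasForBlob method above is left as a TODO. Here is a minimal sketch of one way to implement it, assuming a hypothetical backend endpoint /api/generate-sas that returns a fresh SAS string for the given blob (the endpoint name and response shape are assumptions, not part of the SDK):
// A minimal sketch only: "/api/generate-sas" is a hypothetical endpoint you
// would build yourself (e.g. signing the SAS server-side with the account key);
// it is not part of the Azure SDK.
async function getNewSasForBlob(blobURL) {
  const response = await fetch(
    `/api/generate-sas?blob=${encodeURIComponent(blobURL)}`
  );
  const sas = await response.text();
  // Normalize so the returned SAS always starts with "?", as SasStore expects
  return sas.startsWith("?") ? sas : `?${sas}`;
}
Generating the SAS on a server keeps the storage account key out of the browser; the client only ever receives short-lived tokens.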
Solution 2:
Instead of uploading a large file in a single request, upload it in chunks (i.e., split the large file into smaller blocks). Uploading in chunks allows bigger files to be uploaded without reading them completely into memory in one go.
For more details, refer to this document.
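Note that uploadBrowserDataToBlockBlob from Solution 1 already splits large files into blocks internally, but you can also stage the blocks yourself with the same v10 SDK. A minimal sketch, assuming blockBlobURL and file are set up as in Solution 1:
// A minimal sketch of manual chunked upload; blockBlobURL and file are assumed
// to be created the same way as in Solution 1.
async function uploadInChunks(blockBlobURL, file) {
  const blockSize = 4 * 1024 * 1024; // 4 MB per block
  const blockIds = [];
  for (let offset = 0, i = 0; offset < file.size; offset += blockSize, i++) {
    // Block IDs must be base64-encoded strings of equal length
    const blockId = btoa("block-" + i.toString().padStart(5, "0"));
    // Only this slice of the file is read, so the whole file never sits in memory
    const chunk = file.slice(offset, offset + blockSize);
    await blockBlobURL.stageBlock(Azure.Aborter.none, blockId, chunk, chunk.size);
    blockIds.push(blockId);
  }
  // Commit the staged blocks, in order, to form the final blob
  await blockBlobURL.commitBlockList(Azure.Aborter.none, blockIds);
}
Because every stageBlock call goes through the same pipeline, the SAS update policy from Solution 1 also keeps long-running chunked uploads authorized.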