I have created a storage event trigger to trigger a pipeline in Azure Data Factory. This trigger works when I manually put a file in the datalake. But when the file is uploaded from Azure Function, trigger doesn't work.
Below is my function to upload file in datalake.
public void UploadFileToDatalake(FileUpload file, string containerSasUri, string stage, string dir, ILogger log)
{
log.LogInformation("inside uploadFileTodatalake");
UriBuilder sasUri = new UriBuilder(containerSasUri);
DataLakeServiceClient dataLakeServiceClient = new DataLakeServiceClient(sasUri.Uri);
string container = Globals._transientContainer;
var fileSystemClient = dataLakeServiceClient.GetFileSystemClient(container);
string pathFileDL = file.Name;
string directory = dir;
MemoryStream ms = new MemoryStream();
StreamWriter sw = new StreamWriter(ms);
sw.Write(File.ReadAllText(file.FullPath));
sw.Flush();
ms.Seek(0, SeekOrigin.Begin);
file.Content = ms;
DataLakeDirectoryClient directoryClient = fileSystemClient.GetDirectoryClient(directory);
try
{
if (directoryClient.Exists())
{
log.LogInformation(directoryClient.Name);
DataLakeFileClient fileClient = directoryClient.CreateFile(pathFileDL); // 0kb blob
file.Content.Position = 0;
fileClient.Upload(file.Content, true);
log.LogInformation($"{stage}: {file.Name} uploaded to {pathFileDL}");
file.Content.Close();
}
}
finally
{
//fileSystemClient.Delete();
}
}
FileUpload is a model class I am using.
public class FileUpload
{
public string Name { get; set; }
public Stream Content { get; set; }
public string FullPath { get; set; }
}
Thanks in advance.