I have a function that is triggered by Cosmos DB inserts/updates, and I copy each document to an Azure Storage blob. When debugging, the function fires over and over again for the same handful of documents.
I've tried limiting the number of documents processed, but that makes it process only the same N documents over and over. I've also tried raising the RUs on the trigger collection (and on the lease collection), but that had no effect.
[FunctionName("Function1")]
public static async Task Run([CosmosDBTrigger(
databaseName: "Events",
collectionName: "DomainEvents",
ConnectionStringSetting = "cosmosConnectionString",
CreateLeaseCollectionIfNotExists = true,
LeaseCollectionName = "DomainEventLeases")]IReadOnlyList<Document> input, ILogger log, ExecutionContext context)
{
    // Copies each changed Cosmos DB document to a block blob (named after the
    // document id) in the "wormauditlog" container.
    //
    // NOTE(review): the change feed trigger re-delivers the same batch whenever an
    // invocation throws or the host stops before the lease checkpoint is written.
    // An unlogged failure here therefore looks exactly like "the same documents
    // fire over and over" while debugging — which is why failures are now logged.

    // Guard clause: nothing to copy (same observable behavior as the original's
    // outer if — an empty/null batch returns without touching storage or config).
    if (input == null || input.Count == 0)
    {
        return;
    }

    var config = new ConfigurationBuilder()
        .SetBasePath(context.FunctionAppDirectory)
        .AddJsonFile("local.settings.json", optional: true, reloadOnChange: true)
        .AddEnvironmentVariables()
        .Build();

    // Guard clause: fail fast on a bad/missing connection string. Throwing makes
    // the change feed retry this batch indefinitely, so log first — otherwise the
    // retry loop is silent.
    if (!CloudStorageAccount.TryParse(config["StorageConnectionAppSetting"], out var cloudStorageAccount))
    {
        log.LogError("StorageConnectionAppSetting could not be parsed as a storage connection string.");
        throw new FunctionInvocationException("Bad storage connection string.");
    }

    var client = cloudStorageAccount.CreateCloudBlobClient();
    // Assumes the "wormauditlog" container already exists — TODO confirm, or call
    // container.CreateIfNotExistsAsync() once at startup.
    var container = client.GetContainerReference("wormauditlog");

    foreach (var document in input)
    {
        var blob = container.GetBlockBlobReference(document.Id);

        // Bug fix: the original called document.ToByteArray() twice per upload,
        // serializing every document two times. Serialize once and reuse the buffer.
        var payload = document.ToByteArray();

        try
        {
            await blob.UploadFromByteArrayAsync(payload, 0, payload.Length);
        }
        catch (Exception e)
        {
            // Bug fix: the original caught Exception e and immediately rethrew
            // without using e — the failing document id was invisible. Log it,
            // then rethrow with `throw;` to preserve the stack trace.
            log.LogError(e, "Failed to copy document {DocumentId} to blob storage.", document.Id);
            throw;
        }
    }
}