I have an Azure Function in main.py which is blob triggered. I have two storage accounts: the first holds the image data and JSON data; the second is where I want to store the JSON file taken from the first account, after appending some data to it.
import logging
import os
import json
import datetime
import azure.functions as func
import numpy as np
from azure.cognitiveservices.vision.customvision.prediction import CustomVisionPredictionClient
from msrest.authentication import ApiKeyCredentials
from azure.storage.blob import BlobServiceClient
from azure.cosmos import CosmosClient
from os import environ
from PIL import Image
import requests
import ast
import io
def main(myblob: func.InputStream):
    """Blob-triggered entry point.

    Downloads a JSON metadata file from the image-data storage account and
    appends its contents to a date-named append blob in the inference-data
    storage account, creating that blob first if it does not exist.

    :param myblob: The triggering blob stream (image-data account).
    """
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {myblob.name}\n"
                 f"Blob Size: {myblob.length} bytes")
    blob_val = myblob.read()

    # -- Image-data storage account: connection string and container --
    imagedata_acc_string = os.environ["ImageData_ConnectionString"]
    imagedata_acc_container = os.environ["ImageContainerName"]
    blob_service_client = BlobServiceClient.from_connection_string(imagedata_acc_string)
    container_client = blob_service_client.get_container_client(imagedata_acc_container)

    # -- Inference-data storage account name / key / container --
    inferencedata_acc_name = os.environ['INFERENCEDATA_ACCOUNT_NAME']
    inferencedata_acc_key = os.environ['INFERENCEDATA_ACCOUNT_KEY']
    inferencedata_imagedata_acc_container = os.environ['INFERENCEDATA_ACCOUNT_CONTAINER']

    # ... (elided code — presumably derives `jsonfilepath` from the
    #      triggering blob's name; TODO confirm) ...

    # Read topview blob
    topview_blob = blob_val

    # Download the json file
    try:
        blob_client = container_client.get_blob_client(jsonfilepath)
        streamdownloader = blob_client.download_blob()
        jsondata = json.loads(streamdownloader.readall())
        logging.info(f"date ---- {jsondata['date']}")
        logging.info(f" -----Json data downloaded-------")
    except Exception as Argument:
        # BUGFIX: the original logged the failure but fell through and then
        # used the undefined `jsondata`, causing a NameError. Bail out instead.
        logging.exception("Error downloading json blob")
        return

    # 1. Read json data from image storage
    # 2. If json file doesn't exist in inference storage, create it;
    #    otherwise open a blob client on the existing file.
    # 3. Append json data from image storage to the file in inference storage
    # 4. Upload the data to inference storage

    # ... (elided code) ...

    imagejson_data = jsondata
    infblob_name = jsondata['date'].replace("/", "_") + ".csv"
    # -- Inference data Storage account connection string and container --
    infdata_acc_string = os.environ["INFERENCE_ACCOUNT_CONN_STRING"]
    infdata_acc_container = os.environ["INFERENCEDATA_ACCOUNT_CONTAINER"]
    infblob_service_client = BlobServiceClient.from_connection_string(infdata_acc_string)
    infblob_client = infblob_service_client.get_blob_client(
        container=infdata_acc_container,
        blob=infblob_name)

    # BUGFIX: BlobClient.create_append_blob() takes no container/blob
    # arguments — the client is already bound to them; the original call
    # raised TypeError.
    if not infblob_client.exists():
        infblob_client.create_append_blob()
    # BUGFIX: the original appended only in the `else` branch, so nothing
    # was ever written on the first run (when the blob had just been
    # created). Also, `jsondata` is a dict (result of json.loads), so it
    # must be serialized before appending.
    payload = json.dumps(imagejson_data)
    infblob_client.append_block(payload, length=len(payload))
I also have a host.json:
{
"version": "2.0",
"logging": {
"applicationInsights": {
"samplingSettings": {
"isEnabled": true,
"excludedTypes": "Request"
}
}
},
"extensionBundle": {
"id": "Microsoft.Azure.Functions.ExtensionBundle",
"version": "[2.*, 3.0.0)"
}
}
All the environment variables are defined in local.settings.json.
.
I also have a function.json (note: Azure Functions requires this file to be named function.json, not functions.json):
{
"scriptFile": "src/main.py",
"bindings": [
{
"name": "myblob",
"type": "blobTrigger",
"direction": "in",
      "path": "<container-where-image-files-are>/{name}",
"connection": "ImageData_ConnectionString"
}
]
}
I know my function is blob triggered, but how can I test it locally? I try to put breakpoints in the code, but execution never stops at them — the function just runs to the end.