import os
import json
import logging
import datetime as dt

import azure.functions as func
from azure.storage.blob import BlobServiceClient

# Source storage (same account your blob trigger points to).
BLOB_CONN = os.environ["BLOB_CONN"]  # set as App Setting
CONTAINER = "bank-logs"              # container your trigger uses


def main(blob: func.InputStream) -> None:
    """Blob-trigger entry point: log the event and write a diagnostic marker.

    Drops a small JSON marker blob under ``intesa/_diag/`` in the same
    storage account so that the trigger firing is observable from Storage
    itself, independently of App Insights.

    Args:
        blob: The triggering blob stream provided by the Functions runtime.
    """
    # 1) Log to App Insights (if configured). Use %s for the length:
    #    InputStream.length can be None for some bindings, and %d would
    #    break the log record's formatting.
    logging.info("BlobIngest fired: name=%s len=%s", blob.name, blob.length)

    # Capture the timestamp ONCE so the marker name and the payload agree
    # (calling utcnow() twice could straddle a second boundary). utcnow()
    # is also deprecated; use an aware UTC datetime instead.
    now = dt.datetime.now(dt.timezone.utc)

    # 2) Write a marker blob so we can *see* it fired from Storage itself.
    svc = BlobServiceClient.from_connection_string(BLOB_CONN)
    marker_name = f"intesa/_diag/processed-{now.strftime('%Y%m%dT%H%M%S')}.txt"
    payload = {
        "source": blob.name,
        "length": blob.length,
        # Keep the original "...Z" suffix convention for UTC.
        "seen_at_utc": now.isoformat().replace("+00:00", "Z"),
    }
    svc.get_blob_client(CONTAINER, marker_name).upload_blob(
        json.dumps(payload).encode("utf-8"),
        overwrite=True,
    )