From 7f3d3c74b5874dece6131e5cb353be9643b6912a Mon Sep 17 00:00:00 2001 From: VishalS-Microsoft Date: Tue, 31 Mar 2026 10:13:55 +0530 Subject: [PATCH 01/25] feat: Add weekly schedule for Azure Template validation,split azure-dev to azd-template-validation --- .github/workflows/azd-template-validation.yml | 40 +++++++++++ .github/workflows/azure-dev.yaml | 66 ++++++++++++------- 2 files changed, 81 insertions(+), 25 deletions(-) create mode 100644 .github/workflows/azd-template-validation.yml diff --git a/.github/workflows/azd-template-validation.yml b/.github/workflows/azd-template-validation.yml new file mode 100644 index 00000000..c1918e06 --- /dev/null +++ b/.github/workflows/azd-template-validation.yml @@ -0,0 +1,40 @@ +name: AZD Template Validation +on: + schedule: + - cron: '30 1 * * 4' # Every Thursday at 7:00 AM IST (1:30 AM UTC) + workflow_dispatch: + push: + branches: + - psl-weeklyschedule-cps + +permissions: + contents: read + id-token: write + pull-requests: write + +jobs: + template_validation: + runs-on: ubuntu-latest + name: azd template validation + environment: production + steps: + - uses: actions/checkout@v4 + + - uses: microsoft/template-validation-action@v0.4.3 + with: + validateAzd: ${{ vars.TEMPLATE_VALIDATE_AZD }} + validateTests: ${{ vars.TEMPLATE_VALIDATE_TESTS }} + useDevContainer: ${{ vars.TEMPLATE_USE_DEV_CONTAINER }} + id: validation + env: + AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }} + AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }} + AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + AZURE_ENV_NAME: ${{ vars.AZURE_ENV_NAME }} + AZURE_LOCATION: ${{ vars.AZURE_LOCATION }} + AZURE_AI_SERVICE_LOCATION: ${{ vars.AZURE_LOCATION }} + AZURE_AI_MODEL_CAPACITY: 1 # keep low to avoid potential quota issues + GITHUB_TOKEN: ${{ secrets.AZD_GITHUB_TOKEN }} + + - name: print result + run: cat ${{ steps.validation.outputs.resultFile }} diff --git a/.github/workflows/azure-dev.yaml b/.github/workflows/azure-dev.yaml index 
25e35625..eda660bb 100644 --- a/.github/workflows/azure-dev.yaml +++ b/.github/workflows/azure-dev.yaml @@ -1,38 +1,54 @@ -name: Azure Template Validation +name: Azure Dev Deploy + on: workflow_dispatch: + push: + branches: + - psl-weeklyschedule-cps permissions: contents: read id-token: write - pull-requests: write jobs: - template_validation_job: - environment: production + deploy: runs-on: ubuntu-latest - name: Template validation - + environment: production + env: + AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }} + AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }} + AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + AZURE_ENV_NAME: ${{ vars.AZURE_ENV_NAME }} + AZURE_LOCATION: ${{ vars.AZURE_LOCATION }} + AZURE_DEV_COLLECT_TELEMETRY: ${{ vars.AZURE_DEV_COLLECT_TELEMETRY }} steps: - # Step 1: Checkout the code from your repository - - name: Checkout code - uses: actions/checkout@v5 + - name: Checkout Code + uses: actions/checkout@v4 + + - name: Install azd + uses: Azure/setup-azd@v2 - # Step 2: Validate the Azure template using microsoft/template-validation-action - - name: Validate Azure Template - uses: microsoft/template-validation-action@v0.4.3 - id: validation + - name: Login to Azure + uses: azure/login@v2 with: - useDevContainer: false - env: - AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }} - AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }} - AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }} - AZURE_ENV_NAME: ${{ secrets.AZURE_ENV_NAME }} - AZURE_LOCATION: ${{ secrets.AZURE_LOCATION }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - AZURE_DEV_COLLECT_TELEMETRY: ${{ vars.AZURE_DEV_COLLECT_TELEMETRY }} + client-id: ${{ secrets.AZURE_CLIENT_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + + - name: Login to AZD + shell: bash + run: | + azd auth login \ + --client-id "$AZURE_CLIENT_ID" \ + --federated-credential-provider "github" \ + --tenant-id "$AZURE_TENANT_ID" - # Step 3: Print the 
result of the validation - - name: Print result - run: cat ${{ steps.validation.outputs.resultFile }} + - name: Provision and Deploy + shell: bash + run: | + if ! azd env select "$AZURE_ENV_NAME"; then + azd env new "$AZURE_ENV_NAME" --subscription "$AZURE_SUBSCRIPTION_ID" --location "$AZURE_LOCATION" --no-prompt + fi + azd config set defaults.subscription "$AZURE_SUBSCRIPTION_ID" + azd env set AZURE_AI_SERVICE_LOCATION="$AZURE_LOCATION" + azd up --no-prompt From 1dc1ec9f9d54fb8c54eec79703eb2454b6b02c5e Mon Sep 17 00:00:00 2001 From: VishalS-Microsoft Date: Tue, 31 Mar 2026 12:04:58 +0530 Subject: [PATCH 02/25] fix: Update environment variable for AI deployments location in azure-dev.yaml --- .github/workflows/azure-dev.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/azure-dev.yaml b/.github/workflows/azure-dev.yaml index eda660bb..46e03cc7 100644 --- a/.github/workflows/azure-dev.yaml +++ b/.github/workflows/azure-dev.yaml @@ -50,5 +50,5 @@ jobs: azd env new "$AZURE_ENV_NAME" --subscription "$AZURE_SUBSCRIPTION_ID" --location "$AZURE_LOCATION" --no-prompt fi azd config set defaults.subscription "$AZURE_SUBSCRIPTION_ID" - azd env set AZURE_AI_SERVICE_LOCATION="$AZURE_LOCATION" + azd env set AZURE_ENV_AI_DEPLOYMENTS_LOCATION="$AZURE_LOCATION" azd up --no-prompt From b380b4b8d5fe366ef0929b8715acbd2c31b26a48 Mon Sep 17 00:00:00 2001 From: VishalS-Microsoft Date: Tue, 31 Mar 2026 12:19:38 +0530 Subject: [PATCH 03/25] feat: Add AZURE_ENV_MODEL_CAPACITY to Azure Dev Deploy workflow --- .github/workflows/azure-dev.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/azure-dev.yaml b/.github/workflows/azure-dev.yaml index 46e03cc7..1a1174fc 100644 --- a/.github/workflows/azure-dev.yaml +++ b/.github/workflows/azure-dev.yaml @@ -20,6 +20,7 @@ jobs: AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }} AZURE_ENV_NAME: ${{ vars.AZURE_ENV_NAME }} AZURE_LOCATION: ${{ vars.AZURE_LOCATION }} + 
AZURE_ENV_MODEL_CAPACITY: 1 # keep low to avoid potential quota issues AZURE_DEV_COLLECT_TELEMETRY: ${{ vars.AZURE_DEV_COLLECT_TELEMETRY }} steps: - name: Checkout Code From 5961173586f1e1b7747bde65a20d7ef0e0c9f95f Mon Sep 17 00:00:00 2001 From: VishalS-Microsoft Date: Tue, 31 Mar 2026 13:02:39 +0530 Subject: [PATCH 04/25] fix: Update GITHUB_TOKEN secret reference in azd-template-validation.yml --- .github/workflows/azd-template-validation.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/azd-template-validation.yml b/.github/workflows/azd-template-validation.yml index c1918e06..d94a4dcc 100644 --- a/.github/workflows/azd-template-validation.yml +++ b/.github/workflows/azd-template-validation.yml @@ -34,7 +34,7 @@ jobs: AZURE_LOCATION: ${{ vars.AZURE_LOCATION }} AZURE_AI_SERVICE_LOCATION: ${{ vars.AZURE_LOCATION }} AZURE_AI_MODEL_CAPACITY: 1 # keep low to avoid potential quota issues - GITHUB_TOKEN: ${{ secrets.AZD_GITHUB_TOKEN }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: print result run: cat ${{ steps.validation.outputs.resultFile }} From 757f3d79e992baf0aa0888299d2876585b612c69 Mon Sep 17 00:00:00 2001 From: Shreyas-Microsoft Date: Tue, 31 Mar 2026 19:25:27 +0530 Subject: [PATCH 05/25] fix post deployment script --- infra/scripts/post_deployment.sh | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/infra/scripts/post_deployment.sh b/infra/scripts/post_deployment.sh index 2a3e5d0c..2f9eab9c 100644 --- a/infra/scripts/post_deployment.sh +++ b/infra/scripts/post_deployment.sh @@ -98,8 +98,8 @@ else # Read schema entries from manifest SCHEMA_COUNT=$(cat "$SCHEMA_INFO_FILE" | grep -o '"File"' | wc -l) - REGISTERED_IDS="" - REGISTERED_NAMES="" + REGISTERED_IDS=() + REGISTERED_NAMES=() for idx in $(seq 0 $((SCHEMA_COUNT - 1))); do # Parse entry fields using grep/sed (no python needed) @@ -128,8 +128,8 @@ else if [ -n "$EXISTING_ID" ]; then echo " Schema '$CLASS_NAME' already exists 
with ID: $EXISTING_ID" - REGISTERED_IDS="$REGISTERED_IDS $EXISTING_ID" - REGISTERED_NAMES="$REGISTERED_NAMES $CLASS_NAME" + REGISTERED_IDS+=("$EXISTING_ID") + REGISTERED_NAMES+=("$CLASS_NAME") continue fi @@ -148,8 +148,8 @@ else if [ "$HTTP_CODE" = "200" ]; then SCHEMA_ID=$(echo "$BODY" | sed 's/.*"Id"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/') echo " Successfully registered: $DESCRIPTION's Schema Id - $SCHEMA_ID" - REGISTERED_IDS="$REGISTERED_IDS $SCHEMA_ID" - REGISTERED_NAMES="$REGISTERED_NAMES $CLASS_NAME" + REGISTERED_IDS+=("$SCHEMA_ID") + REGISTERED_NAMES+=("$CLASS_NAME") else echo " Failed to upload '$FILE_NAME'. HTTP Status: $HTTP_CODE" echo " Error Response: $BODY" @@ -205,10 +205,9 @@ else ALREADY_IN_SET=$(curl -s "${SCHEMASETVAULT_URL}${SCHEMASET_ID}/schemas" 2>/dev/null || echo "[]") # Iterate over registered schemas - IDX=0 - for SCHEMA_ID in $REGISTERED_IDS; do - IDX=$((IDX + 1)) - CLASS_NAME=$(echo "$REGISTERED_NAMES" | tr ' ' '\n' | sed -n "${IDX}p") + for i in "${!REGISTERED_IDS[@]}"; do + SCHEMA_ID="${REGISTERED_IDS[$i]}" + CLASS_NAME="${REGISTERED_NAMES[$i]}" if echo "$ALREADY_IN_SET" | grep -q "\"Id\"[[:space:]]*:[[:space:]]*\"$SCHEMA_ID\""; then echo " Schema '$CLASS_NAME' ($SCHEMA_ID) already in schema set - skipped" @@ -236,5 +235,6 @@ else echo "" echo "============================================================" echo "Schema registration process completed." 
+ echo " Schemas registered: ${#REGISTERED_IDS[@]}" echo "============================================================" fi From 101aa47679a1e9027fadc3a2e50d98962f71392a Mon Sep 17 00:00:00 2001 From: VishalS-Microsoft Date: Wed, 1 Apr 2026 11:46:42 +0530 Subject: [PATCH 06/25] fix: Remove push trigger for psl-weeklyschedule-cps branch in Azure Dev Deploy workflow --- .github/workflows/azure-dev.yaml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/azure-dev.yaml b/.github/workflows/azure-dev.yaml index 1a1174fc..a8013a9f 100644 --- a/.github/workflows/azure-dev.yaml +++ b/.github/workflows/azure-dev.yaml @@ -2,9 +2,6 @@ name: Azure Dev Deploy on: workflow_dispatch: - push: - branches: - - psl-weeklyschedule-cps permissions: contents: read From 3ba61cd59b0c9395850f1688764cb67b2e6e31fc Mon Sep 17 00:00:00 2001 From: VishalS-Microsoft Date: Thu, 2 Apr 2026 10:43:16 +0530 Subject: [PATCH 07/25] fix: Remove push trigger for psl-weeklyschedule-cps branch in AZD template validation workflow --- .github/workflows/azd-template-validation.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/azd-template-validation.yml b/.github/workflows/azd-template-validation.yml index d94a4dcc..b63cf52d 100644 --- a/.github/workflows/azd-template-validation.yml +++ b/.github/workflows/azd-template-validation.yml @@ -3,9 +3,6 @@ on: schedule: - cron: '30 1 * * 4' # Every Thursday at 7:00 AM IST (1:30 AM UTC) workflow_dispatch: - push: - branches: - - psl-weeklyschedule-cps permissions: contents: read From c73397cc1e4b03826102b45a2b8673d5e8574d9c Mon Sep 17 00:00:00 2001 From: VishalS-Microsoft Date: Thu, 2 Apr 2026 12:43:06 +0530 Subject: [PATCH 08/25] fix: Update push trigger configuration for psl-weeklyschedule-cps branch in azd-template-validation.yml --- .github/workflows/azd-template-validation.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/azd-template-validation.yml 
b/.github/workflows/azd-template-validation.yml index b63cf52d..bdcb933d 100644 --- a/.github/workflows/azd-template-validation.yml +++ b/.github/workflows/azd-template-validation.yml @@ -3,6 +3,9 @@ on: schedule: - cron: '30 1 * * 4' # Every Thursday at 7:00 AM IST (1:30 AM UTC) workflow_dispatch: + push: + branches: + - psl-weeklyschedule-cps permissions: contents: read @@ -29,8 +32,8 @@ jobs: AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }} AZURE_ENV_NAME: ${{ vars.AZURE_ENV_NAME }} AZURE_LOCATION: ${{ vars.AZURE_LOCATION }} - AZURE_AI_SERVICE_LOCATION: ${{ vars.AZURE_LOCATION }} - AZURE_AI_MODEL_CAPACITY: 1 # keep low to avoid potential quota issues + AZURE_ENV_AI_DEPLOYMENTS_LOCATION: ${{ vars.AZURE_LOCATION }} + AZURE_ENV_MODEL_CAPACITY: 1 # keep low to avoid potential quota issues GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: print result From 9f8b5ec97a3ae7b0ca41b0bec32939492bc30e78 Mon Sep 17 00:00:00 2001 From: VishalS-Microsoft Date: Thu, 2 Apr 2026 14:33:34 +0530 Subject: [PATCH 09/25] fix: Remove push trigger for psl-weeklyschedule-cps branch in azd-template-validation.yml --- .github/workflows/azd-template-validation.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/azd-template-validation.yml b/.github/workflows/azd-template-validation.yml index bdcb933d..3eff786c 100644 --- a/.github/workflows/azd-template-validation.yml +++ b/.github/workflows/azd-template-validation.yml @@ -3,9 +3,6 @@ on: schedule: - cron: '30 1 * * 4' # Every Thursday at 7:00 AM IST (1:30 AM UTC) workflow_dispatch: - push: - branches: - - psl-weeklyschedule-cps permissions: contents: read From d0018f4e26e607ea33d38c38320371725b83e23d Mon Sep 17 00:00:00 2001 From: VishalS-Microsoft Date: Thu, 2 Apr 2026 19:14:46 +0530 Subject: [PATCH 10/25] fix: Update timestamp handling and environment name in workflow files --- .github/workflows/azd-template-validation.yml | 8 +++++++- .github/workflows/azure-dev.yaml | 5 +++++ 2 files changed, 12 
insertions(+), 1 deletion(-) diff --git a/.github/workflows/azd-template-validation.yml b/.github/workflows/azd-template-validation.yml index 3eff786c..0397a5a1 100644 --- a/.github/workflows/azd-template-validation.yml +++ b/.github/workflows/azd-template-validation.yml @@ -3,6 +3,9 @@ on: schedule: - cron: '30 1 * * 4' # Every Thursday at 7:00 AM IST (1:30 AM UTC) workflow_dispatch: + push: + branches: + - psl-weeklyschedule-cps permissions: contents: read @@ -17,6 +20,9 @@ jobs: steps: - uses: actions/checkout@v4 + - name: Set timestamp + run: echo "HHMM=$(date -u +'%H%M')" >> $GITHUB_ENV + - uses: microsoft/template-validation-action@v0.4.3 with: validateAzd: ${{ vars.TEMPLATE_VALIDATE_AZD }} @@ -27,7 +33,7 @@ jobs: AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }} AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }} AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }} - AZURE_ENV_NAME: ${{ vars.AZURE_ENV_NAME }} + AZURE_ENV_NAME: azd-${{ vars.AZURE_ENV_NAME }}-${{ env.HHMM }} AZURE_LOCATION: ${{ vars.AZURE_LOCATION }} AZURE_ENV_AI_DEPLOYMENTS_LOCATION: ${{ vars.AZURE_LOCATION }} AZURE_ENV_MODEL_CAPACITY: 1 # keep low to avoid potential quota issues diff --git a/.github/workflows/azure-dev.yaml b/.github/workflows/azure-dev.yaml index a8013a9f..1847f9d5 100644 --- a/.github/workflows/azure-dev.yaml +++ b/.github/workflows/azure-dev.yaml @@ -23,6 +23,11 @@ jobs: - name: Checkout Code uses: actions/checkout@v4 + - name: Set timestamp and env name + run: | + HHMM=$(date -u +'%H%M') + echo "AZURE_ENV_NAME=azd-${{ vars.AZURE_ENV_NAME }}-${HHMM}" >> $GITHUB_ENV + - name: Install azd uses: Azure/setup-azd@v2 From 375e0fdedbfc5aaa986c2ab62e79cc85bcb9c171 Mon Sep 17 00:00:00 2001 From: VishalS-Microsoft Date: Thu, 2 Apr 2026 19:53:27 +0530 Subject: [PATCH 11/25] fix: Remove push trigger for psl-weeklyschedule-cps branch in azd-template-validation.yml --- .github/workflows/azd-template-validation.yml | 3 --- 1 file changed, 3 deletions(-) diff --git 
a/.github/workflows/azd-template-validation.yml b/.github/workflows/azd-template-validation.yml index 0397a5a1..1853099f 100644 --- a/.github/workflows/azd-template-validation.yml +++ b/.github/workflows/azd-template-validation.yml @@ -3,9 +3,6 @@ on: schedule: - cron: '30 1 * * 4' # Every Thursday at 7:00 AM IST (1:30 AM UTC) workflow_dispatch: - push: - branches: - - psl-weeklyschedule-cps permissions: contents: read From 774da96e8edbad1087f2e6a87f695f460b8e3f93 Mon Sep 17 00:00:00 2001 From: Shreyas-Microsoft Date: Fri, 3 Apr 2026 13:33:00 +0530 Subject: [PATCH 12/25] Add troubleshooot for 403 - content understanding --- docs/TroubleShootingSteps.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/TroubleShootingSteps.md b/docs/TroubleShootingSteps.md index 182907fc..6d4d60cd 100644 --- a/docs/TroubleShootingSteps.md +++ b/docs/TroubleShootingSteps.md @@ -27,6 +27,7 @@ Use these as quick reference guides to unblock your deployments. | **InternalSubscriptionIsOverQuotaForSku** | Subscription quota exceeded for the requested SKU | [View Solution](#quota--capacity-limitations) | | **InvalidResourceGroup** | Invalid resource group configuration | [View Solution](#resource-group--deployment-management) | | **RequestDisallowedByPolicy** | Azure Policy blocking the requested operation | [View Solution](#subscription--access-issues) | +| **403 Forbidden - Content Understanding** | Content Understanding returns 403 in WAF/private networking deployment | [View Solution](#network--infrastructure-configuration) | ## 📖 Table of Contents @@ -127,6 +128,7 @@ Use these as quick reference guides to unblock your deployments. | **RouteTableCannotBeAttachedForAzureBastionSubnet** | Route table attached to Azure Bastion subnet | This error occurs because Azure Bastion subnet (`AzureBastionSubnet`) has a platform restriction that prevents route tables from being attached.

**How to reproduce:**
  • In `virtualNetwork.bicep`, add `attachRouteTable: true` to the `AzureBastionSubnet` configuration:
    `{ name: 'AzureBastionSubnet', addressPrefixes: ['10.0.10.0/26'], attachRouteTable: true }`
  • Add a Route Table module to the template
  • Update subnet creation to attach route table conditionally:
    `routeTableResourceId: subnet.?attachRouteTable == true ? routeTable.outputs.resourceId : null`
  • Deploy the template → Azure throws `RouteTableCannotBeAttachedForAzureBastionSubnet`

**Resolution:**
  • Remove the `attachRouteTable: true` flag from `AzureBastionSubnet` configuration
  • Ensure no route table is associated with `AzureBastionSubnet`
  • Route tables can only be attached to other subnets, not `AzureBastionSubnet`
  • For more details, refer to [Azure Bastion subnet requirements](https://learn.microsoft.com/en-us/azure/bastion/configuration-settings#subnet)
| | **VMSizeIsNotPermittedToEnableAcceleratedNetworking** | VM size does not support accelerated networking | This error occurs when you attempt to enable accelerated networking on a VM size that does not support it. This deployment's jumpbox VM **requires** accelerated networking.

**Default VM size:** `Standard_D2s_v5` — supports accelerated networking.

**How this error happens:**
  • You override the VM size (via `AZURE_ENV_VM_SIZE`) with a size that doesn't support accelerated networking (e.g., `Standard_A2m_v2`, A-series, or B-series VMs)
  • Azure rejects the deployment with `VMSizeIsNotPermittedToEnableAcceleratedNetworking`

**Resolution:**
  • Use the default `Standard_D2s_v5` (recommended)
  • If overriding VM size, choose one that supports accelerated networking:
    `Standard_D2s_v4`, `Standard_D2as_v5` (AMD), `Standard_D2s_v3`
  • Verify VM size supports accelerated networking:
    `az vm list-skus --location <location> --size <size> --query "[?capabilities[?name=='AcceleratedNetworkingEnabled' && value=='True']]"`
  • Avoid A-series and B-series VMs — they do not support accelerated networking
  • See [VM sizes with accelerated networking](https://learn.microsoft.com/en-us/azure/virtual-network/accelerated-networking-overview)
| | **NetworkSecurityGroupNotCompliantForAzureBastionSubnet** / **SecurityRuleParameterContainsUnsupportedValue** | NSG rules blocking required Azure Bastion ports | This error occurs when the Network Security Group (NSG) attached to `AzureBastionSubnet` explicitly denies inbound TCP ports 443 and/or 4443, which Azure Bastion requires for management and tunneling.

**How to reproduce:**
  • Deploy the template with `enablePrivateNetworking=true` so the virtualNetwork module creates `AzureBastionSubnet` and a Network Security Group that denies ports 443 and 4443
  • Attempt to deploy Azure Bastion into that subnet
  • During validation, Bastion detects the deny rules and fails with `NetworkSecurityGroupNotCompliantForAzureBastionSubnet`

**Resolution:**
  • **Remove or modify deny rules** for ports 443 and 4443 in the NSG attached to `AzureBastionSubnet`
  • **Ensure required inbound rules** per [Azure Bastion NSG requirements](https://learn.microsoft.com/en-us/azure/bastion/bastion-nsg)
  • **Use Bicep conditions** to skip NSG attachments for `AzureBastionSubnet` if deploying Bastion
  • **Validate the NSG configuration** before deploying Bastion into the subnet
| +| **403 Forbidden - Content Understanding** | Azure AI Content Understanding returns 403 Forbidden in WAF (private networking) deployment | This error occurs when the **Azure AI Content Understanding** service returns a `403 Forbidden` response during document processing in a **WAF-enabled (private networking)** deployment.

**Why this happens:**
In WAF deployments (`enablePrivateNetworking=true`), the Content Understanding AI Services account (`aicu-<env>`) is configured with `publicNetworkAccess: Disabled`. All traffic must flow through the **private endpoint** (`pep-aicu-<env>`) and resolve via private DNS zones (`privatelink.cognitiveservices.azure.com`, `privatelink.services.ai.azure.com`, `privatelink.contentunderstanding.ai.azure.com`). If any part of this chain is misconfigured, the request either reaches the public endpoint (which is blocked) or fails to route entirely, resulting in a 403.

**Common causes:**
  • Private DNS zones not linked to the VNet — DNS resolution falls back to the public IP, which is blocked
  • Private endpoint connection is not in **Approved** state
  • Content Understanding is deployed in a different region (`contentUnderstandingLocation`, defaults to `WestUS`) than the main deployment — the private endpoint still works cross-region, but DNS misconfiguration is more likely
  • Container Apps are not injected into the VNet or are on a subnet that cannot reach the private endpoint
  • Managed Identity used by the Container App does not have the required **Cognitive Services User** role on the Content Understanding resource

**Resolution:**
  • **Verify private endpoint status:**
    `az network private-endpoint show --name pep-aicu-<env> --resource-group <resource-group> --query "privateLinkServiceConnections[0].privateLinkServiceConnectionState.status"`
    Expected: `Approved`
  • **Verify private DNS zone VNet links:**
    `az network private-dns zone list --resource-group <resource-group> -o table`
    Ensure `privatelink.cognitiveservices.azure.com`, `privatelink.services.ai.azure.com`, and `privatelink.contentunderstanding.ai.azure.com` all have VNet links
  • **Test DNS resolution from the jumpbox VM** (inside the VNet):
    `nslookup aicu-<env>.cognitiveservices.azure.com`
    Should resolve to a private IP (e.g., `10.x.x.x`), NOT a public IP
  • **Verify RBAC role assignments:** Ensure the Container App managed identity has **Cognitive Services User** role on the Content Understanding resource:
    `az role assignment list --scope /subscriptions/<subscription-id>/resourceGroups/<resource-group>/providers/Microsoft.CognitiveServices/accounts/aicu-<env> --query "[?roleDefinitionName=='Cognitive Services User']" -o table`
  • **Check Container App VNet integration:** Confirm the Container App Environment is deployed into the VNet and can reach the backend subnet where the private endpoint resides
  • **Redeploy if needed:**
    `azd up`

**Reference:**
  • [Configure private endpoints for Azure AI Services](https://learn.microsoft.com/en-us/azure/ai-services/cognitive-services-virtual-networks)
  • [Azure Private DNS zones](https://learn.microsoft.com/en-us/azure/dns/private-dns-overview)
| --------------------------------- From 50eb37511d28aa33a0087e4d8a2b1357e1ab7491 Mon Sep 17 00:00:00 2001 From: "Prekshith D J (Persistent Systems Inc)" Date: Mon, 6 Apr 2026 17:12:47 +0530 Subject: [PATCH 13/25] Filter the paths for pipeline run --- .github/workflows/create-release.yml | 5 +++++ src/ContentProcessor/azure_cicd.yml | 3 +++ src/ContentProcessorAPI/azure_cicd.yaml | 3 +++ src/ContentProcessorWeb/azure_cicd.yaml | 3 +++ src/ContentProcessorWorkflow/azure_cicd.yaml | 3 +++ 5 files changed, 17 insertions(+) diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml index 128d4f4b..10dc72bb 100644 --- a/.github/workflows/create-release.yml +++ b/.github/workflows/create-release.yml @@ -4,6 +4,11 @@ on: push: branches: - main + paths: + - 'src/**' + - 'infra/**' + - 'azure.yaml' + - '.github/workflows/create-release.yml' permissions: contents: write diff --git a/src/ContentProcessor/azure_cicd.yml b/src/ContentProcessor/azure_cicd.yml index 9abff61b..310c4df1 100644 --- a/src/ContentProcessor/azure_cicd.yml +++ b/src/ContentProcessor/azure_cicd.yml @@ -2,6 +2,9 @@ trigger: branches: include: - main + paths: + include: + - src/ContentProcessor/** # When multiple commits land quickly on main, only run the latest. batch: true diff --git a/src/ContentProcessorAPI/azure_cicd.yaml b/src/ContentProcessorAPI/azure_cicd.yaml index c12ec6c3..14ead012 100644 --- a/src/ContentProcessorAPI/azure_cicd.yaml +++ b/src/ContentProcessorAPI/azure_cicd.yaml @@ -2,6 +2,9 @@ trigger: branches: include: - main + paths: + include: + - src/ContentProcessorAPI/** # When multiple commits land quickly on main, only run the latest. 
batch: true diff --git a/src/ContentProcessorWeb/azure_cicd.yaml b/src/ContentProcessorWeb/azure_cicd.yaml index 791ebf14..3b4ed6a2 100644 --- a/src/ContentProcessorWeb/azure_cicd.yaml +++ b/src/ContentProcessorWeb/azure_cicd.yaml @@ -3,6 +3,9 @@ trigger: include: - main - frontend_dev + paths: + include: + - src/ContentProcessorWeb/** # When multiple commits land quickly on main, only run the latest. batch: true diff --git a/src/ContentProcessorWorkflow/azure_cicd.yaml b/src/ContentProcessorWorkflow/azure_cicd.yaml index 127b7abc..7a787812 100644 --- a/src/ContentProcessorWorkflow/azure_cicd.yaml +++ b/src/ContentProcessorWorkflow/azure_cicd.yaml @@ -2,6 +2,9 @@ trigger: branches: include: - main + paths: + include: + - src/ContentProcessorWorkflow/** # When multiple commits land quickly on main, only run the latest. batch: true From b2782f492469a017f51c168f11bcf8d6f67aa567 Mon Sep 17 00:00:00 2001 From: Shreyas-Microsoft Date: Tue, 7 Apr 2026 20:55:34 +0530 Subject: [PATCH 14/25] Add cognitive services account refresh step to post-deployment script --- infra/main.bicep | 3 +++ infra/scripts/post_deployment.sh | 19 +++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/infra/main.bicep b/infra/main.bicep index affe8ff6..ba9d8883 100644 --- a/infra/main.bicep +++ b/infra/main.bicep @@ -1890,5 +1890,8 @@ output CONTAINER_REGISTRY_NAME string = avmContainerRegistry.outputs.name @description('The login server of the Azure Container Registry.') output CONTAINER_REGISTRY_LOGIN_SERVER string = avmContainerRegistry.outputs.loginServer +@description('The name of the Content Understanding AI Services account.') +output CONTENT_UNDERSTANDING_ACCOUNT_NAME string = avmAiServices_cu.outputs.name + @description('The resource group the resources were deployed into.') output AZURE_RESOURCE_GROUP string = resourceGroup().name diff --git a/infra/scripts/post_deployment.sh b/infra/scripts/post_deployment.sh index 2f9eab9c..2e4cec8f 100644 --- 
a/infra/scripts/post_deployment.sh +++ b/infra/scripts/post_deployment.sh @@ -238,3 +238,22 @@ else echo " Schemas registered: ${#REGISTERED_IDS[@]}" echo "============================================================" fi + +# --- Refresh Content Understanding Cognitive Services account --- +echo "" +echo "============================================================" +echo "Refreshing Content Understanding Cognitive Services account..." +echo "============================================================" + +CU_ACCOUNT_NAME=$(azd env get-value CONTENT_UNDERSTANDING_ACCOUNT_NAME) + +az cognitiveservices account update \ + -g "$RESOURCE_GROUP" \ + -n "$CU_ACCOUNT_NAME" \ + --tags refresh=true + +if [ $? -eq 0 ]; then + echo " ✅ Successfully refreshed Cognitive Services account '$CU_ACCOUNT_NAME'." +else + echo " ❌ Failed to refresh Cognitive Services account '$CU_ACCOUNT_NAME'." +fi From 5eb8592ea03587c0db5cf3f4a949daee7433793e Mon Sep 17 00:00:00 2001 From: Shreyas-Microsoft Date: Tue, 7 Apr 2026 23:50:31 +0530 Subject: [PATCH 15/25] update refresh command to not put all the json --- infra/scripts/post_deployment.sh | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/infra/scripts/post_deployment.sh b/infra/scripts/post_deployment.sh index 2e4cec8f..2b0ee0ad 100644 --- a/infra/scripts/post_deployment.sh +++ b/infra/scripts/post_deployment.sh @@ -245,15 +245,19 @@ echo "============================================================" echo "Refreshing Content Understanding Cognitive Services account..." echo "============================================================" -CU_ACCOUNT_NAME=$(azd env get-value CONTENT_UNDERSTANDING_ACCOUNT_NAME) +CU_ACCOUNT_NAME=$(azd env get-value CONTENT_UNDERSTANDING_ACCOUNT_NAME 2>/dev/null || echo "") -az cognitiveservices account update \ - -g "$RESOURCE_GROUP" \ - -n "$CU_ACCOUNT_NAME" \ - --tags refresh=true - -if [ $? 
-eq 0 ]; then - echo " ✅ Successfully refreshed Cognitive Services account '$CU_ACCOUNT_NAME'." +if [ -z "$CU_ACCOUNT_NAME" ]; then + echo " ⚠️ CONTENT_UNDERSTANDING_ACCOUNT_NAME not found in azd env. Skipping refresh." else - echo " ❌ Failed to refresh Cognitive Services account '$CU_ACCOUNT_NAME'." + echo " Refreshing account: $CU_ACCOUNT_NAME in resource group: $RESOURCE_GROUP" + if az cognitiveservices account update \ + -g "$RESOURCE_GROUP" \ + -n "$CU_ACCOUNT_NAME" \ + --tags refresh=true \ + --output none; then + echo " ✅ Successfully refreshed Cognitive Services account '$CU_ACCOUNT_NAME'." + else + echo " ❌ Failed to refresh Cognitive Services account '$CU_ACCOUNT_NAME'." + fi fi From c86d72ddee32463cf36c3db3a6c66ad6f1224f40 Mon Sep 17 00:00:00 2001 From: VishalS-Microsoft Date: Wed, 8 Apr 2026 12:31:33 +0530 Subject: [PATCH 16/25] fix: Update workflow configurations to correct environment variable names and add push trigger --- .github/workflows/azd-template-validation.yml | 5 ++++- .github/workflows/azure-dev.yaml | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/azd-template-validation.yml b/.github/workflows/azd-template-validation.yml index 1853099f..51a8679f 100644 --- a/.github/workflows/azd-template-validation.yml +++ b/.github/workflows/azd-template-validation.yml @@ -3,6 +3,9 @@ on: schedule: - cron: '30 1 * * 4' # Every Thursday at 7:00 AM IST (1:30 AM UTC) workflow_dispatch: + push: + branches: + - psl-pipelinefix-cpsv2 permissions: contents: read @@ -32,7 +35,7 @@ jobs: AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }} AZURE_ENV_NAME: azd-${{ vars.AZURE_ENV_NAME }}-${{ env.HHMM }} AZURE_LOCATION: ${{ vars.AZURE_LOCATION }} - AZURE_ENV_AI_DEPLOYMENTS_LOCATION: ${{ vars.AZURE_LOCATION }} + AZURE_ENV_AI_SERVICE_LOCATION: ${{ vars.AZURE_LOCATION }} AZURE_ENV_MODEL_CAPACITY: 1 # keep low to avoid potential quota issues GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git 
a/.github/workflows/azure-dev.yaml b/.github/workflows/azure-dev.yaml index 1847f9d5..d7ea0a15 100644 --- a/.github/workflows/azure-dev.yaml +++ b/.github/workflows/azure-dev.yaml @@ -53,5 +53,5 @@ jobs: azd env new "$AZURE_ENV_NAME" --subscription "$AZURE_SUBSCRIPTION_ID" --location "$AZURE_LOCATION" --no-prompt fi azd config set defaults.subscription "$AZURE_SUBSCRIPTION_ID" - azd env set AZURE_ENV_AI_DEPLOYMENTS_LOCATION="$AZURE_LOCATION" + azd env set AZURE_ENV_AI_SERVICE_LOCATION="$AZURE_LOCATION" azd up --no-prompt From 60ae1c56c6c38c7f377018111f97d490218a7933 Mon Sep 17 00:00:00 2001 From: Abdul-Microsoft Date: Wed, 8 Apr 2026 12:49:27 +0530 Subject: [PATCH 17/25] chore: remove unused axios dependency from ContentProcessorWeb Remove axios ^1.13.5 from package.json as all HTTP calls use native fetch via the apiClient.tsx wrapper. Also update documentation references in .github/instructions/ files. - Remove axios from package.json dependencies - Update pnpm-lock.yaml (pnpm install) - Update test-quality.instructions.md to remove axios references - Update code-quality.instructions.md to remove axios from import example Resolves #39065 Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .../.github/instructions/code-quality.instructions.md | 2 +- .../.github/instructions/test-quality.instructions.md | 6 +++--- src/ContentProcessorWeb/package.json | 1 - src/ContentProcessorWeb/pnpm-lock.yaml | 4 ---- 4 files changed, 4 insertions(+), 9 deletions(-) diff --git a/src/ContentProcessorWeb/.github/instructions/code-quality.instructions.md b/src/ContentProcessorWeb/.github/instructions/code-quality.instructions.md index f8025207..d4d84521 100644 --- a/src/ContentProcessorWeb/.github/instructions/code-quality.instructions.md +++ b/src/ContentProcessorWeb/.github/instructions/code-quality.instructions.md @@ -104,7 +104,7 @@ You are performing a systematic code-quality pass on a TypeScript/React codebase - **Group imports** in this order, separated by 
blank lines: 1. React / React DOM - 2. Third-party libraries (`@fluentui/*`, `react-redux`, `axios`, `react-router-dom`, etc.) + 2. Third-party libraries (`@fluentui/*`, `react-redux`, `react-router-dom`, etc.) 3. Internal modules — hooks, services, store, types 4. Sibling / relative components 5. Style imports (`.scss`, `.css`) diff --git a/src/ContentProcessorWeb/.github/instructions/test-quality.instructions.md b/src/ContentProcessorWeb/.github/instructions/test-quality.instructions.md index 6a019b99..9a8ca34d 100644 --- a/src/ContentProcessorWeb/.github/instructions/test-quality.instructions.md +++ b/src/ContentProcessorWeb/.github/instructions/test-quality.instructions.md @@ -114,7 +114,7 @@ Rules: | `describe` block | PascalCase component/function name | `describe('Header', …)` | | `it` block | starts with "should …" | `it('should show the logo', …)` | | Helper function | `create…` / `render…` / `mock…` | `createMockStore`, `renderHeader` | -| Mock file | `__mocks__/.ts` | `__mocks__/axios.ts` | +| Mock file | `__mocks__/.ts` | `__mocks__/httpUtility.ts` | File naming must mirror the source module: ``` @@ -139,7 +139,7 @@ Focus on UNIT-TESTABLE code — pure logic and isolated components: **MEDIUM PRIORITY** (test with mocks): - **Components with Redux**: use `renderWithProviders` with a preloaded state -- **Components with API calls**: mock `axios` / `httpUtility` to return controlled data +- **Components with API calls**: mock `httpUtility` to return controlled data - **MSAL-protected components**: mock `useAuth` / `useMsal` hooks - **Components with router dependencies**: wrap in `` with initial entries @@ -229,7 +229,7 @@ import '@testing-library/jest-dom'; Use these patterns in order of preference: -### a) `jest.mock` — module-level mocks (axios, services, MSAL) +### a) `jest.mock` — module-level mocks (services, MSAL) ```ts jest.mock('../../Services/httpUtility', () => ({ diff --git a/src/ContentProcessorWeb/package.json 
b/src/ContentProcessorWeb/package.json index 7d0a412b..c63dfad3 100644 --- a/src/ContentProcessorWeb/package.json +++ b/src/ContentProcessorWeb/package.json @@ -11,7 +11,6 @@ "@fluentui/react-dialog": "^9.16.6", "@fluentui/react-icons": "^2.0.245", "@reduxjs/toolkit": "^2.11.2", - "axios": "^1.13.5", "babel-preset-react-app": "^10.1.0", "contentprocessor_web": "file:", "cra-template-typescript": "1.3.0", diff --git a/src/ContentProcessorWeb/pnpm-lock.yaml b/src/ContentProcessorWeb/pnpm-lock.yaml index 49888f65..328be7c6 100644 --- a/src/ContentProcessorWeb/pnpm-lock.yaml +++ b/src/ContentProcessorWeb/pnpm-lock.yaml @@ -36,9 +36,6 @@ importers: '@reduxjs/toolkit': specifier: ^2.11.2 version: 2.11.2(react-redux@9.2.0(@types/react@18.3.28)(react@18.3.1)(redux@5.0.1))(react@18.3.1) - axios: - specifier: ^1.13.5 - version: 1.14.0 babel-preset-react-app: specifier: ^10.1.0 version: 10.1.0 @@ -10602,7 +10599,6 @@ snapshots: '@fluentui/react-dialog': 9.16.6(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(scheduler@0.23.2) '@fluentui/react-icons': 2.0.318(react@18.3.1) '@reduxjs/toolkit': 2.11.2(react-redux@9.2.0(@types/react@18.3.28)(react@18.3.1)(redux@5.0.1))(react@18.3.1) - axios: 1.14.0 babel-preset-react-app: 10.1.0 cra-template-typescript: 1.3.0 i18next: 25.8.4(typescript@4.9.5) From 7153938933259e83530d90453448012dcb427dc2 Mon Sep 17 00:00:00 2001 From: VishalS-Microsoft Date: Wed, 8 Apr 2026 13:11:04 +0530 Subject: [PATCH 18/25] fix: Remove push trigger from workflow configuration --- .github/workflows/azd-template-validation.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/azd-template-validation.yml b/.github/workflows/azd-template-validation.yml index 51a8679f..80805f4d 100644 --- a/.github/workflows/azd-template-validation.yml +++ b/.github/workflows/azd-template-validation.yml @@ -3,9 +3,6 @@ on: schedule: - cron: '30 1 * * 4' # Every Thursday at 7:00 AM IST (1:30 AM 
UTC) workflow_dispatch: - push: - branches: - - psl-pipelinefix-cpsv2 permissions: contents: read From 258762fd9d0c7793925f7e9dc143f20ac07b539d Mon Sep 17 00:00:00 2001 From: Shreyas-Microsoft Date: Wed, 8 Apr 2026 15:57:14 +0530 Subject: [PATCH 19/25] update ai summary and gap analysis once refresh is clicked --- .../src/Pages/DefaultPage/PanelCenter.tsx | 5 +++-- .../src/Pages/DefaultPage/PanelLeft.tsx | 3 ++- src/ContentProcessorWeb/src/store/slices/leftPanelSlice.ts | 7 ++++++- 3 files changed, 11 insertions(+), 4 deletions(-) diff --git a/src/ContentProcessorWeb/src/Pages/DefaultPage/PanelCenter.tsx b/src/ContentProcessorWeb/src/Pages/DefaultPage/PanelCenter.tsx index 14976f4d..9154c7d3 100644 --- a/src/ContentProcessorWeb/src/Pages/DefaultPage/PanelCenter.tsx +++ b/src/ContentProcessorWeb/src/Pages/DefaultPage/PanelCenter.tsx @@ -149,6 +149,7 @@ const PanelCenter: React.FC = ({ togglePanel }) => { claimDetails: state.centerPanel.claimDetails, claimDetailsLoader: state.centerPanel.claimDetailsLoader, claimCommentSaving: state.centerPanel.claimCommentSaving, + refreshTrigger: state.leftPanel.refreshTrigger, }), shallowEqual ); @@ -186,7 +187,7 @@ const PanelCenter: React.FC = ({ togglePanel }) => { if (store.selectionType === 'document' && (store.activeProcessId != null || store.activeProcessId !== '') && !status.includes(store.selectedItem.status) && store.selectedItem?.process_id === store.activeProcessId) { fetchContent(); } - }, [store.activeProcessId, store.selectedItem, store.selectionType]) + }, [store.activeProcessId, store.selectedItem, store.selectionType, store.refreshTrigger]) // Fetch claim details when a claim is selected useEffect(() => { @@ -194,7 +195,7 @@ const PanelCenter: React.FC = ({ togglePanel }) => { setClaimComment(''); dispatch(fetchClaimDetails({ claimId: store.selectedClaim.id })); } - }, [store.selectionType, store.selectedClaim?.id, dispatch]) + }, [store.selectionType, store.selectedClaim?.id, dispatch, store.refreshTrigger]) // 
Sync claim comment with API response useEffect(() => { diff --git a/src/ContentProcessorWeb/src/Pages/DefaultPage/PanelLeft.tsx b/src/ContentProcessorWeb/src/Pages/DefaultPage/PanelLeft.tsx index 99337e67..1e5bc9cf 100644 --- a/src/ContentProcessorWeb/src/Pages/DefaultPage/PanelLeft.tsx +++ b/src/ContentProcessorWeb/src/Pages/DefaultPage/PanelLeft.tsx @@ -13,7 +13,7 @@ import { ArrowClockwiseRegular, ArrowUploadRegular, ChevronDoubleLeft20Regular, import { toast } from "react-toastify"; import { useDispatch, useSelector, shallowEqual } from 'react-redux'; -import { fetchSchemaData, fetchSchemasetData, fetchContentTableData, setRefreshGrid, fetchSwaggerData } from '../../store/slices/leftPanelSlice'; +import { fetchSchemaData, fetchSchemasetData, fetchContentTableData, setRefreshGrid, fetchSwaggerData, incrementRefreshTrigger } from '../../store/slices/leftPanelSlice'; import { AppDispatch, RootState } from '../../store'; import { startLoader, stopLoader } from "../../store/slices/loaderSlice"; @@ -80,6 +80,7 @@ const PanelLeft: React.FC = ({ togglePanel }) => { } finally { dispatch(stopLoader("1")); dispatch(setRefreshGrid(false)); + dispatch(incrementRefreshTrigger()); } } diff --git a/src/ContentProcessorWeb/src/store/slices/leftPanelSlice.ts b/src/ContentProcessorWeb/src/store/slices/leftPanelSlice.ts index 26aabe82..8c2b2f9c 100644 --- a/src/ContentProcessorWeb/src/store/slices/leftPanelSlice.ts +++ b/src/ContentProcessorWeb/src/store/slices/leftPanelSlice.ts @@ -27,6 +27,7 @@ export interface LeftPanelState { deleteClaimsLoader: string[]; isGridRefresh: boolean; swaggerJSON: Record | null; + refreshTrigger: number; } interface GridData { @@ -245,6 +246,7 @@ const initialState: LeftPanelState = { deleteFilesLoader: [], deleteClaimsLoader: [], swaggerJSON: null, + refreshTrigger: 0, }; const leftPanelSlice = createSlice({ @@ -268,6 +270,9 @@ const leftPanelSlice = createSlice({ setRefreshGrid: (state, action: PayloadAction) => { state.isGridRefresh = 
action.payload; }, + incrementRefreshTrigger: (state) => { + state.refreshTrigger += 1; + }, }, extraReducers: (builder) => { builder @@ -406,5 +411,5 @@ const leftPanelSlice = createSlice({ }, }); -export const { setSchemaSelectedOption, setSelectedGridRow, setSelectedClaim, setRefreshGrid } = leftPanelSlice.actions; +export const { setSchemaSelectedOption, setSelectedGridRow, setSelectedClaim, setRefreshGrid, incrementRefreshTrigger } = leftPanelSlice.actions; export default leftPanelSlice.reducer; From 5f9e52b280582acfde18fc32cde643b3f05bf5a4 Mon Sep 17 00:00:00 2001 From: Roopan-Microsoft Date: Thu, 9 Apr 2026 11:34:29 +0530 Subject: [PATCH 20/25] fix: add bicep version requirement (>= 0.33.0) to azure.yaml --- azure.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/azure.yaml b/azure.yaml index 3f34cb0e..8d6200c6 100644 --- a/azure.yaml +++ b/azure.yaml @@ -5,6 +5,7 @@ name: content-processing requiredVersions: azd: '>= 1.18.0 != 1.23.9' + bicep: '>= 0.33.0' metadata: template: content-processing@1.0 From e6af68aa9dc96a3909c130657d9acd8c61ba0974 Mon Sep 17 00:00:00 2001 From: "Prekshith D J (Persistent Systems Inc)" Date: Thu, 9 Apr 2026 11:47:36 +0530 Subject: [PATCH 21/25] fix: Remove create-release.yml path filter changes Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .github/workflows/create-release.yml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml index 10dc72bb..128d4f4b 100644 --- a/.github/workflows/create-release.yml +++ b/.github/workflows/create-release.yml @@ -4,11 +4,6 @@ on: push: branches: - main - paths: - - 'src/**' - - 'infra/**' - - 'azure.yaml' - - '.github/workflows/create-release.yml' permissions: contents: write From 175cceece8ba648924cb4e5fc6c1cc4c13db4dc0 Mon Sep 17 00:00:00 2001 From: Thanusree-Microsoft <168087422+Thanusree-Microsoft@users.noreply.github.com> Date: Thu, 9 Apr 2026 17:43:06 +0530 Subject: [PATCH 22/25] 
Update README Added important notes regarding security restrictions and Azure OpenAI quota availability. --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 271ba273..22147dd1 100644 --- a/README.md +++ b/README.md @@ -278,6 +278,8 @@ Follow the quick deploy steps on the deployment guide to deploy this solution
+> **Note**: Some tenants may have additional security restrictions that run periodically and could impact the application (e.g., blocking public network access). If you experience issues or the application stops working, check if these restrictions are the cause. In such cases, consider deploying the WAF-supported version to ensure compliance. To configure, [Click here](./docs/DeploymentGuide.md#31-choose-deployment-type-optional). + > ⚠️ **Important: Check Azure OpenAI Quota Availability**
To ensure sufficient quota is available in your subscription, please follow [quota check instructions guide](./docs/quota_check.md) before you deploy the solution. From 55b120084d663776f5dcf8faae23343a39207fd8 Mon Sep 17 00:00:00 2001 From: Thanusree-Microsoft <168087422+Thanusree-Microsoft@users.noreply.github.com> Date: Thu, 9 Apr 2026 17:43:53 +0530 Subject: [PATCH 23/25] Update Deployment Guide Added note about security restrictions and WAF-supported version. --- docs/DeploymentGuide.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/DeploymentGuide.md b/docs/DeploymentGuide.md index 6c603a3d..44c0546c 100644 --- a/docs/DeploymentGuide.md +++ b/docs/DeploymentGuide.md @@ -6,6 +6,8 @@ This guide walks you through deploying the Content Processing Solution Accelerat 🆘 **Need Help?** If you encounter any issues during deployment, check our [Troubleshooting Guide](./TroubleShootingSteps.md) for solutions to common problems. +> **Note**: Some tenants may have additional security restrictions that run periodically and could impact the application (e.g., blocking public network access). If you experience issues or the application stops working, check if these restrictions are the cause. In such cases, consider deploying the WAF-supported version to ensure compliance. To configure, [Click here](#31-choose-deployment-type-optional). 
+ ## Step 1: Prerequisites & Setup ### 1.1 Azure Account Requirements From b97571051fe0194aff1efb3c0a96317bd85d9170 Mon Sep 17 00:00:00 2001 From: "Prekshith D J (Persistent Systems Inc)" Date: Thu, 9 Apr 2026 18:13:39 +0530 Subject: [PATCH 24/25] Fixed all the code quality issues --- infra/scripts/validate_bicep_params.py | 4 +- .../agent_framework/agent_framework_helper.py | 12 ++--- .../azure_openai_response_retry.py | 20 ++++++-- src/ContentProcessorAPI/app/application.py | 1 + .../app/libs/azure/storage_blob/helper.py | 4 +- .../app/libs/base/application_base.py | 11 +++-- .../app/libs/base/fastapi_protocol.py | 2 +- .../app/routers/claimprocessor.py | 6 ++- .../src/Components/Header/Header.tsx | 2 +- .../UploadContent/UploadFilesModal.tsx | 2 +- .../src/Hooks/useFileType.test.ts | 2 +- .../ProcessQueueGrid/ProcessQueueGrid.tsx | 4 +- .../src/Pages/DefaultPage/PanelCenter.tsx | 1 - .../src/Pages/DefaultPage/PanelRight.tsx | 1 - .../src/store/slices/centerPanelSlice.test.ts | 1 - .../agent_framework/agent_framework_helper.py | 18 ++++--- .../azure_openai_response_retry.py | 20 ++++++-- .../src/main_service.py | 7 ++- .../src/services/content_process_service.py | 3 +- .../src/services/queue_service.py | 49 +++++++++++++++---- .../src/utils/http_request.py | 6 ++- .../tests/conftest.py | 3 +- .../test_application_context_di.py | 2 +- .../tests/unit/steps/test_rai_executor.py | 1 - 24 files changed, 128 insertions(+), 54 deletions(-) diff --git a/infra/scripts/validate_bicep_params.py b/infra/scripts/validate_bicep_params.py index 9c5db6da..34ea8d48 100644 --- a/infra/scripts/validate_bicep_params.py +++ b/infra/scripts/validate_bicep_params.py @@ -108,7 +108,9 @@ def parse_parameters_env_vars(json_path: Path) -> dict[str, list[str]]: data = json.loads(sanitized) params = data.get("parameters", {}) except json.JSONDecodeError: - pass + # Keep validation resilient for partially templated/malformed files: + # if JSON parsing fails, treat as having no parsable 
parameters. + params = {} # Walk each top-level parameter and scan its entire serialized value # for ${VAR} references from the original text. diff --git a/src/ContentProcessor/src/libs/agent_framework/agent_framework_helper.py b/src/ContentProcessor/src/libs/agent_framework/agent_framework_helper.py index 572aa3c7..de778b6e 100644 --- a/src/ContentProcessor/src/libs/agent_framework/agent_framework_helper.py +++ b/src/ContentProcessor/src/libs/agent_framework/agent_framework_helper.py @@ -143,7 +143,7 @@ def create_client( env_file_encoding: str | None = None, instruction_role: str | None = None, ) -> "AzureOpenAIChatClient": - ... + pass @overload @staticmethod @@ -166,7 +166,7 @@ def create_client( instruction_role: str | None = None, retry_config: RateLimitRetryConfig | None = None, ) -> AzureOpenAIChatClientWithRetry: - ... + pass @overload @staticmethod @@ -190,7 +190,7 @@ def create_client( env_file_path: str | None = None, env_file_encoding: str | None = None, ) -> "AzureOpenAIAssistantsClient": - ... + pass @overload @staticmethod @@ -212,7 +212,7 @@ def create_client( env_file_encoding: str | None = None, instruction_role: str | None = None, ) -> "AzureOpenAIResponsesClient": - ... + pass @overload @staticmethod @@ -235,7 +235,7 @@ def create_client( instruction_role: str | None = None, retry_config: RateLimitRetryConfig | None = None, ) -> AzureOpenAIResponseClientWithRetry: - ... + pass @overload @staticmethod @@ -252,7 +252,7 @@ def create_client( env_file_path: str | None = None, env_file_encoding: str | None = None, ) -> "AzureAIAgentClient": - ... 
+ pass @staticmethod def create_client( diff --git a/src/ContentProcessor/src/libs/agent_framework/azure_openai_response_retry.py b/src/ContentProcessor/src/libs/agent_framework/azure_openai_response_retry.py index ee84eb94..32b0f187 100644 --- a/src/ContentProcessor/src/libs/agent_framework/azure_openai_response_retry.py +++ b/src/ContentProcessor/src/libs/agent_framework/azure_openai_response_retry.py @@ -616,8 +616,15 @@ async def _tail(): if callable(close): try: await close() - except Exception: - pass + except Exception as close_exc: + # Best-effort stream cleanup: ignore close failures so we preserve + # the original exception/retry path. + logger.debug( + "[AOAI_RETRY_STREAM] ignoring stream close failure during retry handling: %s", + _format_exc_brief(close_exc) + if isinstance(close_exc, BaseException) + else str(close_exc), + ) # One-shot retry for context-length failures. if ( @@ -802,8 +809,13 @@ async def _tail(): if callable(close): try: await close() - except Exception: - pass + except Exception as close_error: + # Intentionally suppress close-time failures so we do not + # mask the original streaming exception that triggered retry handling. + logger.debug( + "[AOAI_RETRY_STREAM] ignoring stream close failure during error handling", + exc_info=close_error, + ) # One-shot retry for context-length failures. if ( diff --git a/src/ContentProcessorAPI/app/application.py b/src/ContentProcessorAPI/app/application.py index fb4a0448..a0e3d368 100644 --- a/src/ContentProcessorAPI/app/application.py +++ b/src/ContentProcessorAPI/app/application.py @@ -53,6 +53,7 @@ class Application(Application_Base): def __init__(self): super().__init__(env_file_path=os.path.join(os.path.dirname(__file__), ".env")) + self.bootstrap() def initialize(self): """Build the FastAPI app, attach middleware, routers, and dependencies. 
diff --git a/src/ContentProcessorAPI/app/libs/azure/storage_blob/helper.py b/src/ContentProcessorAPI/app/libs/azure/storage_blob/helper.py index 355c9fcf..2edefdad 100644 --- a/src/ContentProcessorAPI/app/libs/azure/storage_blob/helper.py +++ b/src/ContentProcessorAPI/app/libs/azure/storage_blob/helper.py @@ -7,6 +7,7 @@ retrieve them during downstream pipeline stages. """ +from azure.core.exceptions import ResourceNotFoundError from azure.storage.blob import BlobServiceClient from app.utils.azure_credential_utils import get_azure_credential @@ -124,7 +125,8 @@ def delete_blob_and_cleanup(self, blob_name, container_name=None): container_client = self._get_container_client(container_name) try: container_client.delete_blob(blob_name) - except Exception: + except ResourceNotFoundError: + # Blob already absent; continue with folder cleanup checks. pass blobs = container_client.list_blobs() diff --git a/src/ContentProcessorAPI/app/libs/base/application_base.py b/src/ContentProcessorAPI/app/libs/base/application_base.py index a4821a13..f0311401 100644 --- a/src/ContentProcessorAPI/app/libs/base/application_base.py +++ b/src/ContentProcessorAPI/app/libs/base/application_base.py @@ -4,8 +4,10 @@ """Abstract base for the application bootstrap sequence. Orchestrates the startup order: load .env → read Azure App Configuration → -populate AppContext with configuration and credentials → configure logging → -call the concrete ``initialize()`` implemented by the subclass. +populate AppContext with configuration and credentials → configure logging. +The concrete ``initialize()`` hook is invoked +explicitly via ``bootstrap()`` +after construction is complete. """ import inspect @@ -53,14 +55,13 @@ def initialize(self): ) def __init__(self, env_file_path: str | None = None, **data): - """Execute the full bootstrap sequence. + """Execute base bootstrap setup. Steps: 1. Load ``.env`` from *env_file_path* (or derive from subclass location). 2. 
Read Azure App Configuration and inject values into ``os.environ``. 3. Populate ``application_context`` with config and Azure credentials. 4. Configure Python logging if enabled in config. - 5. Call ``self.initialize()``. Args: env_file_path: Explicit path to a ``.env`` file (optional). @@ -103,6 +104,8 @@ def __init__(self, env_file_path: str | None = None, **data): ): logging.getLogger(logger_name).setLevel(azure_level) + def bootstrap(self): + """Run subclass initialization after construction has completed.""" self.initialize() def _load_env(self, env_file_path: str | None = None): diff --git a/src/ContentProcessorAPI/app/libs/base/fastapi_protocol.py b/src/ContentProcessorAPI/app/libs/base/fastapi_protocol.py index 2c86b91e..34f48bec 100644 --- a/src/ContentProcessorAPI/app/libs/base/fastapi_protocol.py +++ b/src/ContentProcessorAPI/app/libs/base/fastapi_protocol.py @@ -24,7 +24,7 @@ class FastAPIWithContext(Protocol): app_context: AppContext def include_router(self, *args, **kwargs) -> None: - ... + pass def add_app_context_to_fastapi( diff --git a/src/ContentProcessorAPI/app/routers/claimprocessor.py b/src/ContentProcessorAPI/app/routers/claimprocessor.py index 00ea5e55..5eef92a0 100644 --- a/src/ContentProcessorAPI/app/routers/claimprocessor.py +++ b/src/ContentProcessorAPI/app/routers/claimprocessor.py @@ -166,8 +166,10 @@ async def delete_claim_container(claim_id: str, request: Request = None): ) try: claim_processor.delete_claim_container(claim_id=claim_id) - except Exception: - pass + except Exception as ex: + # Best-effort cleanup: continue deleting the claim-process record even if + # the backing claim container is already missing or cannot be deleted. 
+ print(f"Failed to delete claim container for '{claim_id}': {ex}") batch_process_repository: ClaimBatchProcessRepository = app.app_context.get_service( ClaimBatchProcessRepository diff --git a/src/ContentProcessorWeb/src/Components/Header/Header.tsx b/src/ContentProcessorWeb/src/Components/Header/Header.tsx index 3dfad92d..0202dbe8 100644 --- a/src/ContentProcessorWeb/src/Components/Header/Header.tsx +++ b/src/ContentProcessorWeb/src/Components/Header/Header.tsx @@ -8,7 +8,7 @@ import React from "react"; import { useNavigate, useLocation } from "react-router-dom"; -import { useHeaderHooks, Header } from "../../Hooks/useHeaderHooks"; +import { Header } from "../../Hooks/useHeaderHooks"; import { TabList, Tab, diff --git a/src/ContentProcessorWeb/src/Components/UploadContent/UploadFilesModal.tsx b/src/ContentProcessorWeb/src/Components/UploadContent/UploadFilesModal.tsx index de077d96..97f1d453 100644 --- a/src/ContentProcessorWeb/src/Components/UploadContent/UploadFilesModal.tsx +++ b/src/ContentProcessorWeb/src/Components/UploadContent/UploadFilesModal.tsx @@ -337,7 +337,7 @@ const UploadFilesModal: React.FC = ({ open, onClose }) => setFileErrors({}) setUploadCompleted(false); setFileSchemas({}); - } + }; const onCloseHandler = () => { resetState(); onClose(); diff --git a/src/ContentProcessorWeb/src/Hooks/useFileType.test.ts b/src/ContentProcessorWeb/src/Hooks/useFileType.test.ts index 960e8926..afe4078a 100644 --- a/src/ContentProcessorWeb/src/Hooks/useFileType.test.ts +++ b/src/ContentProcessorWeb/src/Hooks/useFileType.test.ts @@ -5,7 +5,7 @@ * @file Tests for useFileType — MIME type resolution based on file extension. 
*/ -import { renderHook, act } from '@testing-library/react'; +import { renderHook } from '@testing-library/react'; import useFileType from './useFileType'; import type { FileWithExtension } from './useFileType'; diff --git a/src/ContentProcessorWeb/src/Pages/DefaultPage/Components/ProcessQueueGrid/ProcessQueueGrid.tsx b/src/ContentProcessorWeb/src/Pages/DefaultPage/Components/ProcessQueueGrid/ProcessQueueGrid.tsx index e4f0193f..9db7210c 100644 --- a/src/ContentProcessorWeb/src/Pages/DefaultPage/Components/ProcessQueueGrid/ProcessQueueGrid.tsx +++ b/src/ContentProcessorWeb/src/Pages/DefaultPage/Components/ProcessQueueGrid/ProcessQueueGrid.tsx @@ -18,9 +18,7 @@ import { import { Tooltip, Button } from "@fluentui/react-components"; import { TableBody, TableCell, TableRow, Table, - TableHeader, TableHeaderCell, TableCellLayout, createTableColumn, useTableFeatures, - useTableSelection, useTableSort, TableColumnId, - TableRowId + TableHeader, TableHeaderCell, TableCellLayout } from "@fluentui/react-components"; import { useDispatch, useSelector, shallowEqual } from "react-redux"; diff --git a/src/ContentProcessorWeb/src/Pages/DefaultPage/PanelCenter.tsx b/src/ContentProcessorWeb/src/Pages/DefaultPage/PanelCenter.tsx index 14976f4d..0481b0b1 100644 --- a/src/ContentProcessorWeb/src/Pages/DefaultPage/PanelCenter.tsx +++ b/src/ContentProcessorWeb/src/Pages/DefaultPage/PanelCenter.tsx @@ -34,7 +34,6 @@ import { saveClaimComment, fetchContentJsonData, setActiveProcessId, - setModifiedResult, } from '../../store/slices/centerPanelSlice'; import { startLoader, stopLoader } from "../../store/slices/loaderSlice"; import { setRefreshGrid } from "../../store/slices/leftPanelSlice"; diff --git a/src/ContentProcessorWeb/src/Pages/DefaultPage/PanelRight.tsx b/src/ContentProcessorWeb/src/Pages/DefaultPage/PanelRight.tsx index 2931f083..7d0db3cd 100644 --- a/src/ContentProcessorWeb/src/Pages/DefaultPage/PanelRight.tsx +++ b/src/ContentProcessorWeb/src/Pages/DefaultPage/PanelRight.tsx 
@@ -14,7 +14,6 @@ import { bundleIcon, ChevronDoubleLeft20Filled, ChevronDoubleLeft20Regular } fro import { useDispatch, useSelector, shallowEqual } from 'react-redux'; import { AppDispatch, RootState } from '../../store'; import { fetchContentFileData } from '../../store/slices/rightPanelSlice'; -import { updatePanelCollapse } from "../../store/slices/defaultPageSlice"; import PanelToolbar from "../../Hooks/usePanelHooks"; import DocumentViewer from '../../Components/DocumentViewer/DocumentViewer'; diff --git a/src/ContentProcessorWeb/src/store/slices/centerPanelSlice.test.ts b/src/ContentProcessorWeb/src/store/slices/centerPanelSlice.test.ts index f27a3876..23daf308 100644 --- a/src/ContentProcessorWeb/src/store/slices/centerPanelSlice.test.ts +++ b/src/ContentProcessorWeb/src/store/slices/centerPanelSlice.test.ts @@ -118,7 +118,6 @@ describe('centerPanelSlice', () => { }); it('should set cError and clear contentData on rejected', () => { - const error = new Error('Server error'); const action = { type: fetchContentJsonData.rejected.type, error: { message: 'Server error' }, diff --git a/src/ContentProcessorWorkflow/src/libs/agent_framework/agent_framework_helper.py b/src/ContentProcessorWorkflow/src/libs/agent_framework/agent_framework_helper.py index ceb3f1ab..e2c9c9fb 100644 --- a/src/ContentProcessorWorkflow/src/libs/agent_framework/agent_framework_helper.py +++ b/src/ContentProcessorWorkflow/src/libs/agent_framework/agent_framework_helper.py @@ -142,7 +142,8 @@ def create_client( # noqa: E704 env_file_path: str | None = None, env_file_encoding: str | None = None, instruction_role: str | None = None, - ) -> "AzureOpenAIChatClient": ... + ) -> "AzureOpenAIChatClient": + pass @overload @staticmethod @@ -164,7 +165,8 @@ def create_client( # noqa: E704 env_file_encoding: str | None = None, instruction_role: str | None = None, retry_config: RateLimitRetryConfig | None = None, - ) -> AzureOpenAIChatClientWithRetry: ... 
+ ) -> AzureOpenAIChatClientWithRetry: + pass @overload @staticmethod @@ -187,7 +189,8 @@ def create_client( # noqa: E704 async_client: object | None = None, env_file_path: str | None = None, env_file_encoding: str | None = None, - ) -> "AzureOpenAIAssistantsClient": ... + ) -> "AzureOpenAIAssistantsClient": + raise NotImplementedError @overload @staticmethod @@ -208,7 +211,8 @@ def create_client( # noqa: E704 env_file_path: str | None = None, env_file_encoding: str | None = None, instruction_role: str | None = None, - ) -> "AzureOpenAIResponsesClient": ... + ) -> "AzureOpenAIResponsesClient": + pass @overload @staticmethod @@ -230,7 +234,8 @@ def create_client( # noqa: E704 env_file_encoding: str | None = None, instruction_role: str | None = None, retry_config: RateLimitRetryConfig | None = None, - ) -> AzureOpenAIResponseClientWithRetry: ... + ) -> AzureOpenAIResponseClientWithRetry: + raise NotImplementedError @overload @staticmethod @@ -246,7 +251,8 @@ def create_client( # noqa: E704 async_credential: object | None = None, env_file_path: str | None = None, env_file_encoding: str | None = None, - ) -> "AzureAIAgentClient": ... + ) -> "AzureAIAgentClient": + pass @staticmethod def create_client( diff --git a/src/ContentProcessorWorkflow/src/libs/agent_framework/azure_openai_response_retry.py b/src/ContentProcessorWorkflow/src/libs/agent_framework/azure_openai_response_retry.py index 1b9a32b2..e3f74fcf 100644 --- a/src/ContentProcessorWorkflow/src/libs/agent_framework/azure_openai_response_retry.py +++ b/src/ContentProcessorWorkflow/src/libs/agent_framework/azure_openai_response_retry.py @@ -679,8 +679,15 @@ async def _tail(): if callable(close): try: await close() - except Exception: - pass + except Exception as close_error: + # Best-effort cleanup: ignore close failures so we preserve + # retry/original-error handling behavior. 
+ logger.debug( + "[AOAI_RETRY_STREAM] ignored stream close error during cleanup: %s", + _format_exc_brief(close_error) + if isinstance(close_error, BaseException) + else str(close_error), + ) # One-shot retry for context-length failures. if ( @@ -865,8 +872,13 @@ async def _tail(): if callable(close): try: await close() - except Exception: - pass + except Exception as close_err: + logger.debug( + "[AOAI_RETRY_STREAM] ignoring stream close error during cleanup: %s", + _format_exc_brief(close_err) + if isinstance(close_err, BaseException) + else str(close_err), + ) # One-shot retry for context-length failures. if ( diff --git a/src/ContentProcessorWorkflow/src/main_service.py b/src/ContentProcessorWorkflow/src/main_service.py index 66ba43db..268bf48c 100644 --- a/src/ContentProcessorWorkflow/src/main_service.py +++ b/src/ContentProcessorWorkflow/src/main_service.py @@ -370,8 +370,11 @@ async def run_queue_service( try: if app.queue_service: await app.queue_service.stop_service() - except Exception: - pass + except Exception as cleanup_error: + logger.debug( + "Ignoring cleanup error while re-raising original failure: %s", + cleanup_error, + ) raise diff --git a/src/ContentProcessorWorkflow/src/services/content_process_service.py b/src/ContentProcessorWorkflow/src/services/content_process_service.py index 7b1e447f..4ddf49e0 100644 --- a/src/ContentProcessorWorkflow/src/services/content_process_service.py +++ b/src/ContentProcessorWorkflow/src/services/content_process_service.py @@ -10,6 +10,7 @@ """ import asyncio +import inspect import json import logging import uuid @@ -295,7 +296,7 @@ async def poll_status( if on_poll is not None: poll_handler = on_poll(result) - if asyncio.iscoroutine(poll_handler): + if inspect.isawaitable(poll_handler): await poll_handler status = result.get("status", "processing") diff --git a/src/ContentProcessorWorkflow/src/services/queue_service.py b/src/ContentProcessorWorkflow/src/services/queue_service.py index 66bd3d1e..18d802ee 100644 
--- a/src/ContentProcessorWorkflow/src/services/queue_service.py +++ b/src/ContentProcessorWorkflow/src/services/queue_service.py @@ -106,8 +106,12 @@ def parse_claim_task_parameters_from_queue_content( try: content = decoded.decode("utf-8") except UnicodeDecodeError: + # Decoded bytes are not UTF-8; keep original content and let the + # JSON validation path below raise a clear payload-format error. pass except Exception: + # Not valid base64 (common for plain JSON payloads); keep original + # content and continue normal JSON parsing. pass content = content.strip() @@ -410,18 +414,27 @@ async def stop_service(self): if self.main_queue: self.main_queue.close() except Exception: - pass + logger.debug( + "Ignoring error while closing main queue client during shutdown.", + exc_info=True, + ) try: if self.dead_letter_queue: self.dead_letter_queue.close() except Exception: - pass + logger.debug( + "Ignoring dead-letter queue close error during shutdown.", + exc_info=True, + ) try: self.queue_service.close() except Exception: - pass + logger.debug( + "Ignoring error while closing queue service client during shutdown.", + exc_info=True, + ) async def force_stop(self): """Alias for ``stop_service()`` (stop already cancels worker tasks).""" @@ -510,8 +523,15 @@ async def stop_process( process_id, target_worker_id, ) - except Exception: - pass + except Exception as exc: + # Best-effort kill path: preserve behavior by not failing the + # request, but record unexpected cancellation/await errors. 
+ logger.warning( + "Unexpected error while finalizing cancellation for process_id=%s worker_id=%s: %s", + process_id, + target_worker_id, + exc, + ) return True @@ -1003,7 +1023,7 @@ async def _process_queue_message(self, worker_id: int, queue_message: QueueMessa except Exception as e: workflow_error = e finally: - claim_processor = None + pass execution_time = time.time() - message_start_time @@ -1069,8 +1089,15 @@ async def _process_queue_message(self, worker_id: int, queue_message: QueueMessa claim_process_id_for_cleanup=None, worker_id=worker_id, ) - except Exception: - pass + except Exception as dead_letter_error: + # Intentionally swallow to keep worker loop alive in this last-resort path. + # We still log the failure for diagnostics/alerting. + logger.exception( + "[worker %s] failed while handling fallback failure path for message_id=%s: %s", + worker_id, + getattr(queue_message, "id", ""), + dead_letter_error, + ) finally: if renew_task is not None: renew_task.cancel() @@ -1280,7 +1307,11 @@ async def _handle_failed_no_retry( visibility_timeout=max(60, retry_delay_s), ) except Exception: - pass + logger.exception( + "Failed to extend visibility timeout after DLQ send failure; message may be retried sooner than expected (message_id=%s worker_id=%s)", + getattr(queue_message, "id", None), + worker_id, + ) return # Cleanup: diff --git a/src/ContentProcessorWorkflow/src/utils/http_request.py b/src/ContentProcessorWorkflow/src/utils/http_request.py index b62ebdeb..a6b3c0d0 100644 --- a/src/ContentProcessorWorkflow/src/utils/http_request.py +++ b/src/ContentProcessorWorkflow/src/utils/http_request.py @@ -18,6 +18,7 @@ from __future__ import annotations import asyncio +import inspect import json import time from dataclasses import dataclass @@ -162,6 +163,8 @@ def __call__(self, retry_state: RetryCallState) -> float: if ra is not None: return min(max(ra, self._min), self._max) except Exception: + # Intentionally ignore non-critical errors while inspecting 
Retry-After + # and fall back to exponential backoff below. pass attempt = max(retry_state.attempt_number, 1) @@ -580,6 +583,7 @@ async def post_multipart_json( try: h.close() except Exception: + # Best-effort cleanup: do not let close() failures mask the main request result. pass async def poll_until_done( @@ -630,7 +634,7 @@ async def poll_until_done( if on_poll is not None: maybe_awaitable = on_poll(resp) - if asyncio.iscoroutine(maybe_awaitable): + if inspect.isawaitable(maybe_awaitable): await maybe_awaitable if resp.status in done: diff --git a/src/ContentProcessorWorkflow/tests/conftest.py b/src/ContentProcessorWorkflow/tests/conftest.py index ce7014b5..d7df7cec 100644 --- a/src/ContentProcessorWorkflow/tests/conftest.py +++ b/src/ContentProcessorWorkflow/tests/conftest.py @@ -4,6 +4,7 @@ """Shared pytest fixtures and configuration for the test suite.""" +import importlib import sys from pathlib import Path @@ -17,7 +18,7 @@ # pick up our `src/sitecustomize.py` unless `PYTHONPATH=src` is set. Import it # explicitly after adding `src/` to `sys.path` so test collection works. try: - import sitecustomize # noqa: F401 + importlib.import_module("sitecustomize") except Exception: # Tests should still be able to run even if the compatibility hook is absent. 
pass diff --git a/src/ContentProcessorWorkflow/tests/unit/libs/application/test_application_context_di.py b/src/ContentProcessorWorkflow/tests/unit/libs/application/test_application_context_di.py index d8668eb6..3241ef22 100644 --- a/src/ContentProcessorWorkflow/tests/unit/libs/application/test_application_context_di.py +++ b/src/ContentProcessorWorkflow/tests/unit/libs/application/test_application_context_di.py @@ -33,7 +33,7 @@ def test_caches_instance(self) -> None: assert a is b def test_with_factory(self) -> None: - ctx = AppContext().add_singleton(_S1, lambda: _S1()) + ctx = AppContext().add_singleton(_S1, _S1) a = ctx.get_service(_S1) b = ctx.get_service(_S1) assert a is b diff --git a/src/ContentProcessorWorkflow/tests/unit/steps/test_rai_executor.py b/src/ContentProcessorWorkflow/tests/unit/steps/test_rai_executor.py index b2522982..1c566c76 100644 --- a/src/ContentProcessorWorkflow/tests/unit/steps/test_rai_executor.py +++ b/src/ContentProcessorWorkflow/tests/unit/steps/test_rai_executor.py @@ -22,7 +22,6 @@ # The @handler decorator in agent_framework validates type annotations at # import time, which fails in the test environment. Patch it to a no-op # before importing the executor module. 
-_orig_handler = sys.modules.get("agent_framework", MagicMock()).handler # type: ignore[union-attr] with patch("agent_framework.handler", lambda fn: fn): from steps.rai.executor.rai_executor import RAIExecutor From bc0c8860cec372f056dcafe9da31392f4dfc9693 Mon Sep 17 00:00:00 2001 From: "Prekshith D J (Persistent Systems Inc)" Date: Thu, 9 Apr 2026 18:22:14 +0530 Subject: [PATCH 25/25] Removed unused import sys --- .../tests/unit/steps/test_rai_executor.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/ContentProcessorWorkflow/tests/unit/steps/test_rai_executor.py b/src/ContentProcessorWorkflow/tests/unit/steps/test_rai_executor.py index 1c566c76..df66ddc1 100644 --- a/src/ContentProcessorWorkflow/tests/unit/steps/test_rai_executor.py +++ b/src/ContentProcessorWorkflow/tests/unit/steps/test_rai_executor.py @@ -11,7 +11,6 @@ from __future__ import annotations import asyncio -import sys from pathlib import Path from unittest.mock import AsyncMock, MagicMock, patch