diff --git a/OneBranchPipelines/build-release-package-pipeline.yml b/OneBranchPipelines/build-release-package-pipeline.yml index 6567e7b0..970928e6 100644 --- a/OneBranchPipelines/build-release-package-pipeline.yml +++ b/OneBranchPipelines/build-release-package-pipeline.yml @@ -1,20 +1,44 @@ -# OneBranch Pipeline for mssql-python -# Builds Python wheels for Windows, macOS, and Linux with security compliance +# ========================================================================================= +# OneBranch Release Pipeline for mssql-python +# ========================================================================================= +# Builds Python wheels for all supported platforms with SDL compliance: +# - Windows: Python 3.10-3.14 (x64 + ARM64) +# - macOS: Python 3.10-3.14 (Universal2 = x86_64 + ARM64 in single binary) +# - Linux: Python 3.10-3.14 on manylinux/musllinux (x86_64 + ARM64) +# +# Security Features: +# - ESRP code signing (Windows .pyd files only) +# - ESRP malware scanning (all artifacts) +# - Component Governance (dependency scanning) +# - BinSkim (binary security analysis) +# - CredScan (credential leak detection) +# - PoliCheck (inclusive language scanning) +# - CodeQL (static code analysis) +# - SBOM generation (Software Bill of Materials) +# ========================================================================================= +# Build number format: YYDDD.r (YY=year, DDD=day of year, r=revision) +# Example: 24365.1 = 2024, day 365, revision 1 name: $(Year:YY)$(DayOfYear)$(Rev:.r) -# Pipeline triggers +# ========================= +# PIPELINE TRIGGERS +# ========================= +# Trigger on commits to main branch trigger: branches: include: - main +# Trigger on pull requests to main branch pr: branches: include: - main -# Schedule the pipeline to run on main branch daily at 07:00 AM IST +# Schedule: Daily builds at 07:00 AM IST (01:30 UTC) +# Cron format: minute hour day month weekday +# always:true = run even if no code changes schedules: - cron: "30 1 * * *" displayName: Daily run at 07:00 AM IST @@ -23,8 +47,14 @@ schedules: - main always: true -# Parameters for pipeline behavior +# ========================= +# PIPELINE PARAMETERS +# ========================= parameters: + # OneBranch build type determines compliance level + # - Official: Production builds with full SDL compliance, all security scanning enabled + # - NonOfficial: Development/test builds with reduced security scanning + # Note: Scheduled (daily) builds automatically use 'Official' regardless of this setting - name: oneBranchType displayName: 'OneBranch Template Type' type: string @@ -33,33 +63,30 @@ parameters: - 'NonOfficial' default: 'NonOfficial' - - name: buildConfiguration - displayName: 'Build Configuration' - type: string - values: - - 'Release' - - 'Debug' - default: 'Release' - + # Enable/disable SDL security tasks (BinSkim, CredScan, PoliCheck, etc.) 
+ # Set to false for faster builds during development - name: runSdlTasks displayName: 'Run SDL Security Tasks' type: boolean default: true - - - name: signingEnabled - displayName: 'Enable Code Signing (ESRP)' - type: boolean - default: true - - - name: packageVersion - displayName: 'Package Version (e.g., 0.13.0)' - type: string - default: '0.13.0' - # Configuration matrices for each platform + # ========================= + # PLATFORM CONFIGURATIONS + # ========================= + # Each platform uses different matrix strategy: + # - Windows: Explicit per-version stages (9 stages for x64/ARM64 combos) + # - macOS: Explicit per-version stages (5 stages for universal2 builds) + # - Linux: Per-distro stages, builds ALL Python versions in loop (4 stages) + + # Windows Configuration Matrix + # Each entry creates separate stage: Win_py_ + # pyVer format: '310' = Python 3.10, '314' = Python 3.14 + # arch: 'x64' (Intel/AMD 64-bit) or 'arm64' (ARM64, cross-compiled on x64) + # Note: ARM64 builds use x64 host with ARM64 python.lib for cross-compilation - name: windowsConfigs type: object default: + # x64 builds (5 versions: 3.10-3.14) - pyVer: '310' arch: 'x64' - pyVer: '311' @@ -68,54 +95,89 @@ parameters: arch: 'x64' - pyVer: '313' arch: 'x64' + # - pyVer: '314' # Life of π: Unfolding in v1.0.0 + # arch: 'x64' + # ARM64 builds (4 versions: 3.11-3.14) + # 3.10 excluded due to limited ARM64 support - pyVer: '311' arch: 'arm64' - pyVer: '312' arch: 'arm64' - pyVer: '313' arch: 'arm64' + # - pyVer: '314' # Life of π: Unfolding in v1.0.0 + # arch: 'arm64' + # macOS Configuration Matrix + # Each entry creates separate stage: MacOS_py + # All builds are Universal2 (x86_64 + ARM64 in single binary) + # pyVer format: '310' = Python 3.10, '314' = Python 3.14 - name: macosConfigs type: object default: + # 5 versions: 3.10-3.14 (all universal2) - pyVer: '310' - pyVer: '311' - pyVer: '312' - pyVer: '313' + # - pyVer: '314' # Life of π: Unfolding in v1.0.0 + # Linux Configuration Matrix + # Each entry creates ONE stage that builds ALL Python versions (3.10-3.14) + # tag: 'manylinux' (glibc-based, e.g., Ubuntu/CentOS) or 'musllinux' (musl-based, e.g., Alpine) + # arch: CPU architecture for Docker platform + # platform: Docker platform identifier for multi-arch builds - name: linuxConfigs type: object default: + # manylinux (glibc-based) for x86_64 and ARM64 - { tag: 'manylinux', arch: 'x86_64', platform: 'linux/amd64' } - { tag: 'manylinux', arch: 'aarch64', platform: 'linux/arm64' } + # musllinux (musl-based) for x86_64 and ARM64 - { tag: 'musllinux', arch: 'x86_64', platform: 'linux/amd64' } - { tag: 'musllinux', arch: 'aarch64', platform: 'linux/arm64' } -# Variable templates +# ========================= +# PIPELINE VARIABLES +# ========================= variables: - # Set package version from parameter - - name: PACKAGE_VERSION - value: '${{ parameters.packageVersion }}' - readonly: true - # Set package version from parameter - - name: PACKAGE_VERSION - value: '${{ parameters.packageVersion }}' - readonly: true - - # Alias for SDL tools (compile-time) - - name: packageVersion - value: '${{ parameters.packageVersion }}' - readonly: true + # Determine effective build type: scheduled builds are Official, manual/PR builds use parameter + # Build.Reason values: Schedule, Manual, IndividualCI, PullRequest, BatchedCI + - name: effectiveOneBranchType + ${{ if eq(variables['Build.Reason'], 'Schedule') }}: + value: 'Official' + ${{ else }}: + value: '${{ parameters.oneBranchType }}' + # Variable template imports + # 
Each file provides specific variable groups: + # - common-variables: Shared across all builds (paths, flags) + # - onebranch-variables: OneBranch-specific settings (SDL, compliance) + # - build-variables: Build configuration (compiler flags, options) + # - signing-variables: ESRP signing credentials and settings + # - symbol-variables: Debug symbol publishing configuration - template: /OneBranchPipelines/variables/common-variables.yml@self - template: /OneBranchPipelines/variables/onebranch-variables.yml@self - template: /OneBranchPipelines/variables/build-variables.yml@self - template: /OneBranchPipelines/variables/signing-variables.yml@self - template: /OneBranchPipelines/variables/symbol-variables.yml@self - # Variable groups - - group: 'ESRP Federated Creds (AME)' # Contains ESRP signing credentials + + # Variable group from Azure DevOps Library + # Contains ESRP service connection credentials: + # - SigningEsrpConnectedServiceName + # - SigningAppRegistrationClientId + # - SigningAppRegistrationTenantId + # - SigningEsrpClientId + # - DB_PASSWORD (SQL Server SA password for testing) + - group: 'ESRP Federated Creds (AME)' -# OneBranch resources +# ========================= +# ONEBRANCH RESOURCES +# ========================= +# OneBranch.Pipelines/GovernedTemplates repository contains: +# - SDL compliance templates (BinSkim, CredScan, PoliCheck, etc.) +# - Security scanning templates (ESRP, Component Governance) +# - Artifact publishing templates (OneBranch-compliant artifact handling) resources: repositories: - repository: templates @@ -123,132 +185,231 @@ resources: name: 'OneBranch.Pipelines/GovernedTemplates' ref: 'refs/heads/main' -# Extend OneBranch official template +# ========================= +# PIPELINE TEMPLATE EXTENSION +# ========================= +# Extends OneBranch official template for cross-platform builds +# Template type determined by effectiveOneBranchType: +# - Scheduled builds: Always Official (full SDL compliance) +# - Manual/PR builds: Uses oneBranchType parameter (default NonOfficial) extends: - template: 'v2/OneBranch.${{ parameters.oneBranchType }}.CrossPlat.yml@templates' + template: 'v2/OneBranch.${{ variables.effectiveOneBranchType }}.CrossPlat.yml@templates' + # ========================= + # ONEBRANCH TEMPLATE PARAMETERS + # ========================= parameters: - # Container definitions - # Note: All jobs use custom 1ES pools or Microsoft-hosted agents - # Windows: Django-1ES-pool + WIN22-SQL22 (custom 1ES) - # Linux: Django-1ES-pool + ADO-UB22-SQL22 (custom 1ES) - # macOS: Azure Pipelines + macOS-14 (Microsoft-hosted) + # Pool Configuration + # Different platforms use different agent pools: + # - Windows: Custom 1ES pool (Django-1ES-pool) with WIN22-SQL22 image (Windows Server 2022 + SQL Server 2022) + # - Linux: Custom 1ES pool (Django-1ES-pool) with ADO-UB22-SQL22 image (Ubuntu 22.04 + SQL Server 2022) + # - macOS: Microsoft-hosted pool (Azure Pipelines) with macOS-14 image (macOS Sonoma) + # Note: Container definitions section present but unused (pools configured in individual stage templates) - # Feature flags + # Feature Flags + # Controls OneBranch platform behavior featureFlags: + # Use Windows Server 2022 base image for Windows builds WindowsHostVersion: Version: '2022' - binskimScanAllExtensions: true # Enable scanning of all supported file types including .pyd + # Enable BinSkim scanning for all supported file extensions + # Without this, only .dll/.exe scanned (misses .pyd Python extensions) + binskimScanAllExtensions: true - # Global SDL 
Configuration + # ========================= + # GLOBAL SDL CONFIGURATION + # ========================= + # SDL = Security Development Lifecycle + # Comprehensive security scanning across all build stages # See: https://aka.ms/obpipelines/sdl globalSdl: # Global Guardian baseline and suppression files + # Baseline = known issues that are being tracked + # Suppression = false positives that should be ignored baseline: - baselineFile: $(Build.SourcesDirectory)\.gdn\.gdnbaselines + baselineFile: $(Build.SourcesDirectory)/.gdn/.gdnbaselines suppressionSet: default suppression: - suppressionFile: $(Build.SourcesDirectory)\.gdn\.gdnsuppress + suppressionFile: $(Build.SourcesDirectory)/.gdn/.gdnsuppress suppressionSet: default # ApiScan - Scans APIs for security vulnerabilities - # Disabled: Not applicable to Python wheel distribution model + # Disabled: Requires PDB symbols for Windows DLLs + # Python wheels (.pyd files) better covered by BinSkim + # Justification: JDBC team also disables APIScan for similar reasons apiscan: enabled: false justificationForDisabling: 'APIScan requires PDB symbols for native Windows DLLs. Python wheels primarily contain .pyd files and Python code, better covered by BinSkim. JDBC team also has APIScan disabled for similar reasons.' # Armory - Security scanning for binaries + # Checks for known vulnerabilities in compiled artifacts + # break:true = fail build if critical issues found armory: enabled: ${{ parameters.runSdlTasks }} break: true - # AsyncSdl - Asynchronous SDL tasks + # AsyncSdl - Asynchronous SDL tasks (run after build completion) + # Disabled: All SDL tasks run synchronously during build asyncSdl: enabled: false - # BinSkim - Binary analyzer for security issues + # BinSkim - Binary security analyzer (Microsoft tool) + # Scans compiled binaries for security best practices: + # - Stack buffer overrun protection (/GS) + # - DEP (Data Execution Prevention) + # - ASLR (Address Space Layout Randomization) + # - Control Flow Guard (CFG) + # Scans: .pyd (Python), .dll/.exe (Windows), .so (Linux), .dylib (macOS) binskim: enabled: ${{ parameters.runSdlTasks }} - break: true - # Scan all binary types: .pyd (Python), .dll/.exe (Windows), .so (Linux), .dylib (macOS) + break: true # Fail build on critical BinSkim errors + # Recursive scan of all binary file types analyzeTarget: '$(Build.SourcesDirectory)/**/*.{pyd,dll,exe,so,dylib}' analyzeRecurse: true + # SARIF output (Static Analysis Results Interchange Format) logFile: '$(Build.ArtifactStagingDirectory)/BinSkimResults.sarif' # CodeInspector - Source code security analysis + # Checks Python/C++ code for security anti-patterns codeinspector: enabled: ${{ parameters.runSdlTasks }} logLevel: Error - # CodeQL - Semantic code analysis + # CodeQL - Semantic code analysis (GitHub Advanced Security) + # Deep analysis of Python and C++ code: + # - SQL injection vulnerabilities + # - Buffer overflows + # - Use-after-free + # - Integer overflows + # security-extended suite = comprehensive security queries codeql: enabled: ${{ parameters.runSdlTasks }} language: 'python,cpp' sourceRoot: '$(REPO_ROOT)' querySuite: security-extended - # CredScan - Scans for credentials in code - # Note: Global baseline/suppression files configured at globalSdl level + # CredScan - Credential scanner + # Detects hardcoded credentials, API keys, passwords in code + # Uses global baseline/suppression files configured above credscan: enabled: ${{ parameters.runSdlTasks }} - # ESLint - JavaScript/TypeScript specific, not applicable for Python + # 
ESLint - JavaScript/TypeScript linter + # Disabled: Not applicable to Python/C++ project eslint: enabled: false - # PoliCheck - Checks for politically incorrect terms + # PoliCheck - Political correctness checker + # Scans code and documentation for inappropriate terms + # Exclusion file contains approved exceptions (technical terms) policheck: enabled: ${{ parameters.runSdlTasks }} break: true exclusionFile: '$(REPO_ROOT)/.config/PolicheckExclusions.xml' - # Roslyn Analyzers - .NET-specific, not applicable for Python + # Roslyn Analyzers - .NET C# code analysis + # Disabled: Not applicable to Python/C++ project roslyn: enabled: false - # Publish SDL logs + # Publish SDL Logs + # Uploads security scan results (SARIF files) to pipeline artifacts + # Used for audit trail and compliance reporting publishLogs: enabled: ${{ parameters.runSdlTasks }} # SBOM - Software Bill of Materials + # Generates machine-readable list of all dependencies + # Required for supply chain security and compliance + # Format: SPDX or CycloneDX + # Version automatically detected from wheel metadata (setup.py) sbom: enabled: ${{ parameters.runSdlTasks }} packageName: 'mssql-python' - packageVersion: '${{ variables.packageVersion }}' - # TSA - Threat and Security Assessment (Official builds only) + # TSA - Threat and Security Assessment + # Uploads scan results to Microsoft's TSA tool for tracking + # Only enabled for Official builds (production compliance requirement) tsa: - enabled: ${{ and(eq(parameters.oneBranchType, 'Official'), parameters.runSdlTasks) }} + enabled: ${{ and(eq(variables.effectiveOneBranchType, 'Official'), parameters.runSdlTasks) }} configFile: '$(REPO_ROOT)/.config/tsaoptions.json' - # Pipeline stages + # ========================= + # PIPELINE STAGES + # ========================= + # Total stages: 9 Windows + 5 macOS + 4 Linux + 1 Consolidate = 19 stages + # Stages run in parallel (no dependencies between platform builds) stages: - # Windows stages - one per Python version/architecture combination + # ========================= + # WINDOWS BUILD STAGES + # ========================= + # Strategy: Explicit stage per Python version × architecture + # Total: 9 stages (5 x64 + 4 ARM64) + # Python versions: 3.10-3.14 (x64), 3.11-3.14 (ARM64) + # Each stage: + # 1. Installs Python (UsePythonVersion or NuGet for 3.14) + # 2. Downloads ARM64 python.lib if cross-compiling + # 3. Builds .pyd native extension + # 4. Runs pytest (x64 only, ARM64 can't execute on x64 host) + # 5. Builds wheel + # 6. Publishes artifacts (wheels + PYD + PDB) + # 7. 
ESRP malware scanning - ${{ each config in parameters.windowsConfigs }}: - template: /OneBranchPipelines/stages/build-windows-single-stage.yml@self parameters: stageName: Win_py${{ config.pyVer }}_${{ config.arch }} jobName: BuildWheel + # Convert pyVer '310' → pythonVersion '3.10' pythonVersion: ${{ format('{0}.{1}', substring(config.pyVer, 0, 1), substring(config.pyVer, 1, 2)) }} shortPyVer: ${{ config.pyVer }} architecture: ${{ config.arch }} - oneBranchType: '${{ parameters.oneBranchType }}' - signingEnabled: '${{ parameters.signingEnabled }}' - buildConfiguration: '${{ parameters.buildConfiguration }}' + oneBranchType: '${{ variables.effectiveOneBranchType }}' - # macOS stages - one per Python version (universal2 binaries) + # ========================= + # MACOS BUILD STAGES + # ========================= + # Strategy: Explicit stage per Python version + # Total: 5 stages (3.10-3.14) + # All builds are Universal2 (x86_64 + ARM64 in single .so binary) + # Each stage: + # 1. Installs Python via UsePythonVersion@0 + # 2. Installs CMake and pybind11 + # 3. Builds universal2 .so (ARCHFLAGS="-arch x86_64 -arch arm64") + # 4. Starts SQL Server Docker container (via Colima) + # 5. Runs pytest + # 6. Builds wheel + # 7. Publishes artifacts (wheels + .so) + # 8. ESRP malware scanning - ${{ each config in parameters.macosConfigs }}: - template: /OneBranchPipelines/stages/build-macos-single-stage.yml@self parameters: stageName: MacOS_py${{ config.pyVer }} jobName: BuildWheel + # Convert pyVer '310' → pythonVersion '3.10' pythonVersion: ${{ format('{0}.{1}', substring(config.pyVer, 0, 1), substring(config.pyVer, 1, 2)) }} shortPyVer: ${{ config.pyVer }} - oneBranchType: '${{ parameters.oneBranchType }}' - signingEnabled: '${{ parameters.signingEnabled }}' - buildConfiguration: '${{ parameters.buildConfiguration }}' + oneBranchType: '${{ variables.effectiveOneBranchType }}' - # Linux stages - one per distribution/architecture (builds all Python versions inside) + # ========================= + # LINUX BUILD STAGES + # ========================= + # Strategy: One stage per distribution × architecture + # Total: 4 stages (manylinux×2 + musllinux×2) + # Each stage builds ALL Python versions (3.10-3.14) in a loop + # Distributions: + # - manylinux: glibc-based (Ubuntu, CentOS, etc.) + # - musllinux: musl-based (Alpine Linux) + # Architectures: x86_64 (AMD/Intel), aarch64 (ARM64) + # Each stage: + # 1. Starts PyPA Docker container (manylinux_2_28 or musllinux_1_2) + # 2. Starts SQL Server Docker container + # 3. For each Python version (cp310-cp314): + # a. Builds .so native extension + # b. Builds wheel + # c. Installs wheel in isolated directory + # d. Runs pytest against SQL Server + # 4. Publishes artifacts (all 5 wheels) + # 5. 
Component Governance + AntiMalware scanning - ${{ each config in parameters.linuxConfigs }}: - template: /OneBranchPipelines/stages/build-linux-single-stage.yml@self parameters: @@ -257,28 +418,40 @@ extends: linuxTag: ${{ config.tag }} arch: ${{ config.arch }} dockerPlatform: ${{ config.platform }} - oneBranchType: '${{ parameters.oneBranchType }}' - signingEnabled: '${{ parameters.signingEnabled }}' - buildConfiguration: '${{ parameters.buildConfiguration }}' + oneBranchType: '${{ variables.effectiveOneBranchType }}' - # Consolidate all artifacts into single dist/ folder + # ========================= + # CONSOLIDATE STAGE + # ========================= + # Purpose: Collect all artifacts from platform builds into single dist/ folder + # Dependencies: All 18 build stages (9 Windows + 5 macOS + 4 Linux) + # Stages run in parallel, Consolidate waits for ALL to complete + # Outputs: + # - dist/wheels/*.whl (all platform wheels) + # - dist/bindings/Windows/*.{pyd,pdb} (Windows native extensions) + # - dist/bindings/macOS/*.so (macOS universal2 binaries) + # - dist/bindings/Linux/*.so (Linux native extensions) + # This stage also runs final BinSkim scan on all binaries - stage: Consolidate displayName: 'Consolidate All Artifacts' dependsOn: - # Windows dependencies + # Windows dependencies (9 stages) - Win_py310_x64 - Win_py311_x64 - Win_py312_x64 - Win_py313_x64 + # - Win_py314_x64 # Life of π: Unfolding in v1.0.0 - Win_py311_arm64 - Win_py312_arm64 - Win_py313_arm64 - # macOS dependencies + # - Win_py314_arm64 # Life of π: Unfolding in v1.0.0 + # macOS dependencies (5 stages) - MacOS_py310 - MacOS_py311 - MacOS_py312 - MacOS_py313 - # Linux dependencies + # - MacOS_py314 # Life of π: Unfolding in v1.0.0 + # Linux dependencies (4 stages) - Linux_manylinux_x86_64 - Linux_manylinux_aarch64 - Linux_musllinux_x86_64 @@ -286,6 +459,9 @@ extends: jobs: - template: /OneBranchPipelines/jobs/consolidate-artifacts-job.yml@self parameters: - oneBranchType: '${{ parameters.oneBranchType }}' + # CRITICAL: Use effectiveOneBranchType to ensure scheduled builds run as 'Official' + # Using parameters.oneBranchType would break scheduled builds (they'd run as 'NonOfficial') + oneBranchType: '${{ variables.effectiveOneBranchType }}' - # Note: Symbol publishing is now handled directly in the Windows build stages + # Note: Symbol publishing handled directly in Windows build stages + # PDB files uploaded to Microsoft Symbol Server for debugging diff --git a/OneBranchPipelines/stages/build-linux-single-stage.yml b/OneBranchPipelines/stages/build-linux-single-stage.yml index 58003c5d..bd81ed58 100644 --- a/OneBranchPipelines/stages/build-linux-single-stage.yml +++ b/OneBranchPipelines/stages/build-linux-single-stage.yml @@ -1,55 +1,63 @@ # Linux Single Configuration Stage Template # Builds Python wheels for a specific Linux distribution and architecture -# Builds for Python 3.10, 3.11, 3.12, 3.13 within single job +# Builds for Python 3.10, 3.11, 3.12, 3.13, 3.14 within single job +# Tests each wheel after building with isolated pytest execution parameters: + # Stage identifier (e.g., 'Linux_manylinux_x86_64') - name: stageName type: string + # Job identifier within the stage - name: jobName type: string default: 'BuildWheels' + # Linux distribution type: 'manylinux' (glibc-based) or 'musllinux' (musl libc-based) - name: linuxTag - type: string # 'manylinux' or 'musllinux' + type: string + # CPU architecture: 'x86_64' (AMD64) or 'aarch64' (ARM64) - name: arch - type: string # 'x86_64' or 'aarch64' + type: string + # 
Docker platform for QEMU emulation: 'linux/amd64' or 'linux/arm64' - name: dockerPlatform - type: string # 'linux/amd64' or 'linux/arm64' + type: string + # OneBranch build type: 'Official' (production) or 'NonOfficial' (dev/test) - name: oneBranchType type: string default: 'Official' - - name: signingEnabled - type: boolean - default: true - - name: buildConfiguration - type: string - default: 'Release' stages: - stage: ${{ parameters.stageName }} - displayName: 'Build Linux ${{ parameters.linuxTag }} ${{ parameters.arch }}' + displayName: 'Linux ${{ parameters.linuxTag }} ${{ parameters.arch }}' jobs: - job: ${{ parameters.jobName }} displayName: 'Build Wheels - ${{ parameters.linuxTag }} ${{ parameters.arch }}' + # Use custom 1ES pool with Ubuntu 22.04 + SQL Server 2022 pre-installed pool: type: linux isCustom: true name: Django-1ES-pool demands: - imageOverride -equals ADO-UB22-SQL22 + # Extended timeout for multi-version builds + testing (5 Python versions × build + test time) timeoutInMinutes: 120 variables: - # Disable BinSkim for Linux - requires ICU libraries not available in containers + # Disable BinSkim for Linux - requires ICU libraries not available in manylinux/musllinux containers - name: ob_sdl_binskim_enabled value: false + # OneBranch output directory for artifacts (wheels, bindings, symbols) - name: ob_outputDirectory value: '$(Build.ArtifactStagingDirectory)' + # OneBranch-required variable (unused in this template) - name: LinuxContainerImage value: 'onebranch.azurecr.io/linux/ubuntu-2204:latest' + # Distribution type passed to container selection logic - name: LINUX_TAG value: ${{ parameters.linuxTag }} + # Architecture passed to container selection and file naming - name: ARCH value: ${{ parameters.arch }} + # Docker platform for QEMU-based cross-compilation - name: DOCKER_PLATFORM value: ${{ parameters.dockerPlatform }} @@ -136,45 +144,203 @@ stages: fi displayName: 'Install system build dependencies' + # Start SQL Server container for pytest execution + # Runs on host (not in build container) to be accessible from build container via network + - script: | + set -euxo pipefail + + echo "Starting SQL Server 2022 container for testing..." + docker run -d --name sqlserver-$(LINUX_TAG)-$(ARCH) \ + --platform linux/amd64 \ + -e ACCEPT_EULA=Y \ + -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ + -p 1433:1433 \ + mcr.microsoft.com/mssql/server:2022-latest + + echo "Waiting for SQL Server to be ready..." + for i in {1..30}; do + if docker exec sqlserver-$(LINUX_TAG)-$(ARCH) /opt/mssql-tools18/bin/sqlcmd \ + -S localhost -U SA -P "$(DB_PASSWORD)" -C -Q "SELECT 1" >/dev/null 2>&1; then + echo "✓ SQL Server is ready!" 
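+          # Exit the readiness retry loop on the first successful sqlcmd probe.
+          # Note: if all 30 probes fail, the loop simply finishes and the script
+          # carries on; the problem then typically surfaces later when pytest cannot connect.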
+ break + fi + sleep 2 + done + + # Get SQL Server container IP for build container to connect + SQL_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' sqlserver-$(LINUX_TAG)-$(ARCH)) + echo "SQL Server IP: $SQL_IP" + echo "##vso[task.setvariable variable=SQL_IP]$SQL_IP" + displayName: 'Start SQL Server container for testing' + env: + DB_PASSWORD: $(DB_PASSWORD) + + # Build wheels for all Python versions (3.10-3.14) and test each one - script: | set -euxo pipefail if [[ "$(LINUX_TAG)" == "manylinux" ]]; then SHELL_EXE=bash; else SHELL_EXE=sh; fi docker exec build-$(LINUX_TAG)-$(ARCH) $SHELL_EXE -lc 'mkdir -p /workspace/dist' - for PYBIN in cp310 cp311 cp312 cp313; do - echo "=== Building for $PYBIN on $(LINUX_TAG)/$(ARCH) ===" + # Loop through all Python versions: build wheel -> test wheel -> repeat + for PYBIN in cp310 cp311 cp312 cp313; do # cp314: Life of π: Unfolding in v1.0.0 + echo "" + echo "=====================================================" + echo "Building and testing $PYBIN on $(LINUX_TAG)/$(ARCH)" + echo "=====================================================" + if [[ "$(LINUX_TAG)" == "manylinux" ]]; then - docker exec -e PYBIN=$PYBIN build-$(LINUX_TAG)-$(ARCH) bash -lc ' + # Manylinux (glibc-based) - use bash + docker exec -e PYBIN=$PYBIN -e SQL_IP=$(SQL_IP) -e DB_PASSWORD="$(DB_PASSWORD)" build-$(LINUX_TAG)-$(ARCH) bash -lc ' set -euxo pipefail; + + # Step 1: Setup Python environment PY=/opt/python/${PYBIN}-${PYBIN}/bin/python; - test -x $PY || { echo "Python $PY missing"; exit 0; }; + test -x $PY || { echo "Python $PY missing - skipping"; exit 0; }; ln -sf $PY /usr/local/bin/python; - python -m pip install -U pip setuptools wheel pybind11; + echo "Using: $(python --version)"; + + # Step 2: Install build dependencies + python -m pip install -q -U pip setuptools wheel pybind11; + + # Step 3: Build native extension (.so) + echo "Building native extension..."; cd /workspace/mssql_python/pybind; bash build.sh; + + # Step 4: Build wheel + echo "Building wheel package..."; cd /workspace; python setup.py bdist_wheel; + + # Step 5: Install wheel in isolated directory for testing + echo "Installing wheel in isolated test environment..."; + TEST_DIR="/test_isolated_${PYBIN}"; + rm -rf $TEST_DIR; + mkdir -p $TEST_DIR; + cd $TEST_DIR; + + # Find and install the wheel for this Python version + WHEEL=$(ls /workspace/dist/*${PYBIN}*.whl | head -1); + if [ -z "$WHEEL" ]; then + echo "ERROR: No wheel found for ${PYBIN}"; + exit 1; + fi; + echo "Installing: $WHEEL"; + $PY -m pip install -q "$WHEEL"; + + # Step 6: Verify package imports correctly + echo "Verifying package installation..."; + $PY -c import\ mssql_python; + + # Step 7: Setup test environment + echo "Setting up test environment..."; + $PY -m pip install -q pytest; + cp -r /workspace/tests $TEST_DIR/ || echo "WARNING: No tests directory"; + cp /workspace/pytest.ini $TEST_DIR/ || echo "WARNING: No pytest.ini"; + cp /workspace/requirements.txt $TEST_DIR/ || true; + $PY -m pip install -q -r $TEST_DIR/requirements.txt || true; + + # Step 8: Run pytest (stops on first failure) + if [ -d $TEST_DIR/tests ]; then + echo "Running pytest for ${PYBIN}..."; + DB_CONNECTION_STRING="Server=$SQL_IP;Database=master;Uid=SA;Pwd=$DB_PASSWORD;TrustServerCertificate=yes" \ + $PY -m pytest $TEST_DIR/tests -v --maxfail=1 || { + echo "ERROR: Tests failed for ${PYBIN}"; + exit 1; + }; + echo "✓ All tests passed for ${PYBIN}"; + else + echo "WARNING: No tests found, skipping pytest"; + fi; ' else - docker exec -e 
PYBIN=$PYBIN build-$(LINUX_TAG)-$(ARCH) sh -lc ' + # Musllinux (musl libc-based) - use sh + docker exec -e PYBIN=$PYBIN -e SQL_IP=$(SQL_IP) -e DB_PASSWORD="$(DB_PASSWORD)" build-$(LINUX_TAG)-$(ARCH) sh -lc ' set -euxo pipefail; + + # Step 1: Setup Python environment PY=/opt/python/${PYBIN}-${PYBIN}/bin/python; - test -x $PY || { echo "Python $PY missing"; exit 0; }; + test -x $PY || { echo "Python $PY missing - skipping"; exit 0; }; ln -sf $PY /usr/local/bin/python; - python -m pip install -U pip setuptools wheel pybind11; + echo "Using: $(python --version)"; + + # Step 2: Install build dependencies + python -m pip install -q -U pip setuptools wheel pybind11; + + # Step 3: Build native extension (.so) + echo "Building native extension..."; cd /workspace/mssql_python/pybind; bash build.sh; + + # Step 4: Build wheel + echo "Building wheel package..."; cd /workspace; python setup.py bdist_wheel; + + # Step 5: Install wheel in isolated directory for testing + echo "Installing wheel in isolated test environment..."; + TEST_DIR="/test_isolated_${PYBIN}"; + rm -rf $TEST_DIR; + mkdir -p $TEST_DIR; + cd $TEST_DIR; + + # Find and install the wheel for this Python version + WHEEL=$(ls /workspace/dist/*${PYBIN}*.whl | head -1); + if [ -z "$WHEEL" ]; then + echo "ERROR: No wheel found for ${PYBIN}"; + exit 1; + fi; + echo "Installing: $WHEEL"; + $PY -m pip install -q "$WHEEL"; + + # Step 6: Verify package imports correctly + echo "Verifying package installation..."; + $PY -c import\ mssql_python; + + # Step 7: Setup test environment + echo "Setting up test environment..."; + $PY -m pip install -q pytest; + cp -r /workspace/tests $TEST_DIR/ || echo "WARNING: No tests directory"; + cp /workspace/pytest.ini $TEST_DIR/ || echo "WARNING: No pytest.ini"; + cp /workspace/requirements.txt $TEST_DIR/ || true; + $PY -m pip install -q -r $TEST_DIR/requirements.txt || true; + + # Step 8: Run pytest (stops on first failure) + if [ -d $TEST_DIR/tests ]; then + echo "Running pytest for ${PYBIN}..."; + DB_CONNECTION_STRING="Server=$SQL_IP;Database=master;Uid=SA;Pwd=$DB_PASSWORD;TrustServerCertificate=yes" \ + $PY -m pytest $TEST_DIR/tests -v --maxfail=1 || { + echo "ERROR: Tests failed for ${PYBIN}"; + exit 1; + }; + echo "✓ All tests passed for ${PYBIN}"; + else + echo "WARNING: No tests found, skipping pytest"; + fi; ' fi + + echo "✓ Build and test complete for $PYBIN" done - displayName: 'Build wheels for Python 3.10-3.13' + + echo "" + echo "=====================================================" + echo "✓ All Python versions built and tested successfully!" + echo "=====================================================" + displayName: 'Build and test wheels for Python 3.10-3.14' + env: + DB_PASSWORD: $(DB_PASSWORD) + # Copy built artifacts from container to host for publishing - script: | set -euxo pipefail - docker cp build-$(LINUX_TAG)-$(ARCH):/workspace/dist/. "$(ob_outputDirectory)/wheels/" || echo "No wheels" + # Copy all wheels (5 Python versions) to output directory + echo "Copying wheels to host..." + docker cp build-$(LINUX_TAG)-$(ARCH):/workspace/dist/. "$(ob_outputDirectory)/wheels/" || echo "No wheels found" + + # Copy native .so bindings for artifact archival + echo "Copying .so bindings to host..." mkdir -p "$(ob_outputDirectory)/bindings/$(LINUX_TAG)-$(ARCH)" docker exec build-$(LINUX_TAG)-$(ARCH) $([[ "$(LINUX_TAG)" == "manylinux" ]] && echo bash -lc || echo sh -lc) ' OUT="/tmp/ddbc-out"; @@ -183,16 +349,22 @@ stages: ' docker cp "build-$(LINUX_TAG)-$(ARCH):/tmp/ddbc-out/." 
\ - "$(ob_outputDirectory)/bindings/$(LINUX_TAG)-$(ARCH)/" || echo "No .so files" + "$(ob_outputDirectory)/bindings/$(LINUX_TAG)-$(ARCH)/" || echo "No .so files found" + + echo "✓ Artifacts copied successfully" displayName: 'Copy artifacts to host' + # Cleanup: Stop and remove Docker containers - script: | - docker stop build-$(LINUX_TAG)-$(ARCH) || true - docker rm build-$(LINUX_TAG)-$(ARCH) || true - displayName: 'Cleanup container' - condition: always() + echo "Stopping and removing containers..." + docker stop build-$(LINUX_TAG)-$(ARCH) sqlserver-$(LINUX_TAG)-$(ARCH) || true + docker rm build-$(LINUX_TAG)-$(ARCH) sqlserver-$(LINUX_TAG)-$(ARCH) || true + echo "✓ Containers cleaned up" + displayName: 'Cleanup containers' + condition: always() # Always run cleanup, even if build/test fails - # Explicit publish with OneBranch-compliant artifact name + # Publish artifacts to Azure Pipelines for downstream consumption + # OneBranch requires specific artifact naming: drop__ - task: PublishPipelineArtifact@1 displayName: 'Publish Linux Artifacts' inputs: @@ -200,14 +372,18 @@ stages: artifact: 'drop_${{ parameters.stageName }}_${{ parameters.jobName }}' publishLocation: 'pipeline' - # General malware scanning (Component Governance + OneBranch AntiMalware) + # Security Scanning: Component Governance + OneBranch AntiMalware + # Scans wheels and binaries for known vulnerabilities and malware signatures - template: ../steps/malware-scanning-step.yml@self parameters: scanPath: '$(ob_outputDirectory)' artifactType: 'dll' - # ESRP Malware scanning (Official builds only) - - ${{ if and(eq(parameters.signingEnabled, true), eq(parameters.oneBranchType, 'Official')) }}: + # ESRP Malware Scanning (Official Builds Only) + # ESRP = Microsoft's Enterprise Signing and Release Platform + # Scans wheel files for malware using Microsoft Defender and custom signatures + # Only runs for Official builds (production compliance requirement) + - ${{ if eq(parameters.oneBranchType, 'Official') }}: - task: EsrpMalwareScanning@5 displayName: 'ESRP MalwareScanning - Python Wheels (Official)' inputs: @@ -222,25 +398,9 @@ stages: CleanupTempStorage: 1 VerboseLogin: 1 - # ESRP Malware scanning (when signing is enabled) - - ${{ if eq(parameters.signingEnabled, true) }}: - - task: EsrpMalwareScanning@5 - displayName: 'ESRP MalwareScanning - Python Wheels' - inputs: - ConnectedServiceName: '$(SigningEsrpConnectedServiceName)' - AppRegistrationClientId: '$(SigningAppRegistrationClientId)' - AppRegistrationTenantId: '$(SigningAppRegistrationTenantId)' - EsrpClientId: '$(SigningEsrpClientId)' - UseMSIAuthentication: true - FolderPath: '$(ob_outputDirectory)/wheels' - Pattern: '*.whl' - SessionTimeout: 60 - CleanupTempStorage: 1 - VerboseLogin: 1 - # ESRP Code Signing (DISABLED - wheel files cannot be signed with SignTool) # See compound-esrp-code-signing-step.yml for detailed explanation of why this doesn't work - # - ${{ if eq(parameters.signingEnabled, true) }}: + # - ${{ if eq(parameters.oneBranchType, 'Official') }}: # - template: /OneBranchPipelines/steps/compound-esrp-code-signing-step.yml@self # parameters: # appRegistrationClientId: '$(SigningAppRegistrationClientId)' diff --git a/OneBranchPipelines/stages/build-macos-single-stage.yml b/OneBranchPipelines/stages/build-macos-single-stage.yml index ee538098..71ccaf60 100644 --- a/OneBranchPipelines/stages/build-macos-single-stage.yml +++ b/OneBranchPipelines/stages/build-macos-single-stage.yml @@ -1,56 +1,75 @@ # macOS Single Configuration Stage Template # Builds Python 
wheel for a specific Python version (universal2 binary) +# Universal2 = combined x86_64 + ARM64 binary in single .so file +# Tests with Docker-based SQL Server (using Colima as Docker runtime) parameters: + # Stage identifier (e.g., 'MacOS_py312') - name: stageName type: string + # Job identifier within the stage - name: jobName type: string default: 'BuildWheel' + # Python version in X.Y format (e.g., '3.12') - name: pythonVersion type: string + # Python version as 3-digit string for file naming (e.g., '312') - name: shortPyVer type: string + # OneBranch build type: 'Official' (production) or 'NonOfficial' (dev/test) - name: oneBranchType type: string default: 'Official' - - name: signingEnabled - type: boolean - default: true - - name: buildConfiguration - type: string - default: 'Release' stages: - stage: ${{ parameters.stageName }} - displayName: 'Build macOS Python ${{ parameters.pythonVersion }}' + displayName: 'macOS Py${{ parameters.pythonVersion }} Universal2' jobs: - job: ${{ parameters.jobName }} - displayName: 'Build Wheel - Python ${{ parameters.pythonVersion }} universal2' + displayName: 'Build Wheel - Py${{ parameters.pythonVersion }} Universal2' + # Pool Configuration + # macOS-14 image = macOS Sonoma with Xcode 15, Python 3.x toolchain + # type:linux is Azure Pipelines quirk (macOS pools declare as 'linux' type) pool: type: linux isCustom: true name: Azure Pipelines vmImage: 'macOS-14' + # 120-minute timeout (universal2 builds take longer due to dual-architecture compilation) timeoutInMinutes: 120 + # Build Variables variables: - # Disable BinSkim for macOS - primarily designed for Windows binaries + # Disable BinSkim (Windows-focused binary analyzer) - macOS uses Mach-O format, not PE - name: ob_sdl_binskim_enabled value: false + # OneBranch artifact output directory - name: ob_outputDirectory value: '$(Build.ArtifactStagingDirectory)' + # Linux container image (unused in macOS builds, but required by OneBranch template) - name: LinuxContainerImage value: 'onebranch.azurecr.io/linux/ubuntu-2204:latest' + # Python version in X.Y format (e.g., '3.12') - name: pythonVersion value: ${{ parameters.pythonVersion }} + # Python version as 3-digit string (e.g., '312') for file naming - name: shortPyVer value: ${{ parameters.shortPyVer }} steps: + # ========================= + # SOURCE CODE CHECKOUT + # ========================= + # fetchDepth: 0 = full git history (needed for version tagging) - checkout: self fetchDepth: 0 + # ========================= + # PYTHON INSTALLATION + # ========================= + # UsePythonVersion@0 supports Python 3.10-3.14 on macOS + # No need for NuGet download like Windows (3.14 is in Azure Pipelines registry) - task: UsePythonVersion@0 inputs: versionSpec: '${{ parameters.pythonVersion }}' @@ -58,12 +77,24 @@ stages: displayName: 'Use Python ${{ parameters.pythonVersion }} (Universal2)' continueOnError: false + # ========================= + # BUILD TOOLS + # ========================= + # CMake = cross-platform build system generator (needed for C++ compilation) + # Uninstall first to ensure clean version (avoid conflicts with pre-installed CMake) - script: | brew update brew uninstall cmake --ignore-dependencies || echo "CMake not installed" brew install cmake displayName: 'Install CMake' + # ========================= + # PYTHON DEPENDENCIES + # ========================= + # Install build dependencies: + # - requirements.txt: runtime dependencies (if any) + # - cmake: CMake Python wrapper + # - pybind11: C++/Python binding library (headers needed 
for compilation) - script: | python --version python -m pip --version @@ -72,6 +103,12 @@ stages: python -m pip install cmake pybind11 displayName: 'Install dependencies' + # ========================= + # NATIVE EXTENSION BUILD + # ========================= + # Build universal2 .so binary (x86_64 + ARM64 in single file) + # build.sh sets ARCHFLAGS="-arch x86_64 -arch arm64" for clang + # Output: mssql_python.cpython-3XX-darwin.so (Mach-O universal binary) - script: | echo "Python Version: ${{ parameters.pythonVersion }}" echo "Building Universal2 Binary" @@ -80,6 +117,8 @@ stages: displayName: 'Build .so file' continueOnError: false + # Copy native extension to artifact directory for later inspection + # .so file will be packaged into wheel in later step - task: CopyFiles@2 inputs: SourceFolder: '$(Build.SourcesDirectory)/mssql_python' @@ -87,17 +126,30 @@ stages: TargetFolder: '$(ob_outputDirectory)/bindings/macOS' displayName: 'Copy .so files' + # Install Docker CLI and Colima (macOS Docker runtime) + # Colima = lightweight Docker Desktop alternative using macOS virtualization + # vz = native macOS virtualization (faster, only works on M1+) + # qemu = cross-platform emulator (slower, works on Intel Macs) + # 4 CPU cores + 8GB RAM needed for SQL Server container - script: | brew update brew install docker colima - colima start --cpu 3 --memory 10 --disk 30 --vm-type=vz || \ - colima start --cpu 3 --memory 10 --disk 30 --vm-type=qemu + colima start --vm-type vz --cpu 4 --memory 8 || { + echo "vz VM failed, trying qemu..." + colima start --vm-type qemu --cpu 4 --memory 8 + } sleep 30 docker context use colima >/dev/null || true docker version displayName: 'Install and start Docker (Colima)' timeoutInMinutes: 15 + # ========================= + # SQL SERVER CONTAINER + # ========================= + # Start SQL Server 2022 Docker container for pytest execution + # macOS uses host networking (localhost:1433) vs Linux uses container IP + # Container runs in background (-d) and accepts connections on port 1433 - script: | docker pull mcr.microsoft.com/mssql/server:2022-latest docker run --name sqlserver \ @@ -106,6 +158,8 @@ stages: -p 1433:1433 -d \ mcr.microsoft.com/mssql/server:2022-latest + # Wait for SQL Server to accept connections (up to 60 seconds) + # sqlcmd -C flag = trust server certificate (for TLS connection) for i in {1..30}; do docker exec sqlserver /opt/mssql-tools18/bin/sqlcmd \ -S localhost -U SA -P "$DB_PASSWORD" -C -Q "SELECT 1" && break @@ -115,17 +169,36 @@ stages: env: DB_PASSWORD: $(DB_PASSWORD) + # ========================= + # TESTING + # ========================= + # Run pytest against SQL Server container + # Tests use localhost:1433 connection (SA user with password from variable) + # -v = verbose output (show test names and results) - script: | python -m pytest -v displayName: 'Run pytests' env: + # Connection string uses localhost (SQL Server container exposed on port 1433) + # TrustServerCertificate=yes bypasses SSL cert validation (test env only) DB_CONNECTION_STRING: 'Server=tcp:127.0.0.1,1433;Database=master;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes' + # ========================= + # WHEEL BUILD + # ========================= + # Build wheel package from setup.py + # Wheel filename: mssql_python-X.Y.Z-cp3XX-cp3XX-macosx_XX_X_universal2.whl + # bdist_wheel = build binary wheel distribution (contains pre-compiled .so) - script: | python -m pip install --upgrade pip wheel setuptools python setup.py bdist_wheel displayName: 'Build wheel package' + # 
========================= + # ARTIFACT PUBLISHING + # ========================= + # Copy wheel to OneBranch output directory + # dist/ = output directory from setup.py bdist_wheel - task: CopyFiles@2 inputs: SourceFolder: '$(Build.SourcesDirectory)/dist' @@ -133,7 +206,9 @@ stages: TargetFolder: '$(ob_outputDirectory)/wheels' displayName: 'Copy wheel files' - # Explicit publish with OneBranch-compliant artifact name + # Publish all artifacts (wheels + .so files) for Consolidate stage + # Artifact naming: drop__ (OneBranch requirement) + # Consolidate stage downloads this artifact via 'dependsOn' dependency - task: PublishPipelineArtifact@1 displayName: 'Publish macOS Artifacts' inputs: @@ -141,14 +216,21 @@ stages: artifact: 'drop_${{ parameters.stageName }}_${{ parameters.jobName }}' publishLocation: 'pipeline' - # General malware scanning (Component Governance + OneBranch AntiMalware) + # ========================= + # SECURITY SCANNING + # ========================= + # Component Governance + OneBranch AntiMalware scanning + # artifactType:'dll' is misnomer - scans all binary files (.so, .dylib, etc.) - template: ../steps/malware-scanning-step.yml@self parameters: scanPath: '$(ob_outputDirectory)' artifactType: 'dll' - # ESRP Malware scanning (Official builds only) - - ${{ if and(eq(parameters.signingEnabled, true), eq(parameters.oneBranchType, 'Official')) }}: + # ESRP Malware Scanning (Official Builds Only) + # ESRP = Microsoft's Enterprise Signing and Release Platform + # Scans wheel files for malware using Microsoft Defender and custom signatures + # Only runs for Official builds (production compliance requirement) + - ${{ if eq(parameters.oneBranchType, 'Official') }}: - task: EsrpMalwareScanning@5 displayName: 'ESRP MalwareScanning - Python Wheels (Official)' inputs: @@ -158,30 +240,14 @@ stages: EsrpClientId: '$(SigningEsrpClientId)' UseMSIAuthentication: true FolderPath: '$(ob_outputDirectory)/wheels' - Pattern: '*.whl' - SessionTimeout: 60 - CleanupTempStorage: 1 - VerboseLogin: 1 - - # ESRP Malware scanning (when signing is enabled) - - ${{ if eq(parameters.signingEnabled, true) }}: - - task: EsrpMalwareScanning@5 - displayName: 'ESRP MalwareScanning - Python Wheels' - inputs: - ConnectedServiceName: '$(SigningEsrpConnectedServiceName)' - AppRegistrationClientId: '$(SigningAppRegistrationClientId)' - AppRegistrationTenantId: '$(SigningAppRegistrationTenantId)' - EsrpClientId: '$(SigningEsrpClientId)' - UseMSIAuthentication: true - FolderPath: '$(ob_outputDirectory)/wheels' - Pattern: '*.whl' + Pattern: '*.whl' # Scan all wheel files SessionTimeout: 60 CleanupTempStorage: 1 VerboseLogin: 1 # ESRP Code Signing (DISABLED - wheel files cannot be signed with SignTool) # See compound-esrp-code-signing-step.yml for detailed explanation of why this doesn't work - # - ${{ if eq(parameters.signingEnabled, true) }}: + # - ${{ if eq(parameters.oneBranchType, 'Official') }}: # - template: /OneBranchPipelines/steps/compound-esrp-code-signing-step.yml@self # parameters: # appRegistrationClientId: '$(SigningAppRegistrationClientId)' diff --git a/OneBranchPipelines/stages/build-windows-single-stage.yml b/OneBranchPipelines/stages/build-windows-single-stage.yml index 077d1cc3..b432f15e 100644 --- a/OneBranchPipelines/stages/build-windows-single-stage.yml +++ b/OneBranchPipelines/stages/build-windows-single-stage.yml @@ -1,60 +1,119 @@ # Windows Single Configuration Stage Template # Builds Python wheel for a specific Python version and architecture +# Supports both x64 (AMD64) and ARM64 
cross-compilation +# Tests x64 builds with pytest (ARM64 binaries can't run on x64 host) parameters: + # Stage identifier (e.g., 'Win_py312_x64') - name: stageName type: string + # Job identifier within the stage - name: jobName type: string default: 'BuildWheel' + # Python version in X.Y format (e.g., '3.12') - name: pythonVersion type: string + # Python version as 3-digit string for file naming (e.g., '312') - name: shortPyVer type: string + # Target architecture: 'x64' (AMD64) or 'arm64' (ARM64) - name: architecture type: string + # OneBranch build type: 'Official' (production) or 'NonOfficial' (dev/test) - name: oneBranchType type: string default: 'Official' - - name: signingEnabled - type: boolean - default: true - - name: buildConfiguration - type: string - default: 'Release' + # Publish PDB symbols to symbol server (disabled by default, handled in release pipeline) - name: publishSymbols type: boolean default: true stages: - stage: ${{ parameters.stageName }} - displayName: 'Build Windows Python ${{ parameters.pythonVersion }} ${{ parameters.architecture }}' + displayName: 'Windows Py${{ parameters.pythonVersion }} ${{ parameters.architecture }}' jobs: - job: ${{ parameters.jobName }} - displayName: 'Build Wheel - Python ${{ parameters.pythonVersion }} ${{ parameters.architecture }}' + displayName: 'Build Wheel - Py${{ parameters.pythonVersion }} ${{ parameters.architecture }}' + # Use custom 1ES pool with Windows Server 2022 + SQL Server 2022 pre-installed pool: type: windows isCustom: true name: Django-1ES-pool vmImage: WIN22-SQL22 + # Extended timeout for downloads, builds, and testing timeoutInMinutes: 120 variables: + # OneBranch output directory for artifacts (wheels, bindings, symbols) ob_outputDirectory: '$(Build.ArtifactStagingDirectory)' + # OneBranch-required variable (unused in this template) LinuxContainerImage: 'onebranch.azurecr.io/linux/ubuntu-2204:latest' + # Python version passed to build scripts pythonVersion: ${{ parameters.pythonVersion }} + # Short Python version for file naming (e.g., '312') shortPyVer: ${{ parameters.shortPyVer }} + # Target architecture (can differ from host for cross-compilation) targetArch: ${{ parameters.architecture }} - SYSTEM_ACCESSTOKEN: $(System.AccessToken) # Make System.AccessToken available to all steps in this job + # System access token for authenticated downloads (e.g., GitHub artifacts) + SYSTEM_ACCESSTOKEN: $(System.AccessToken) steps: - checkout: self fetchDepth: 0 + # Python 3.14 Installation: Download from NuGet (not yet in UsePythonVersion@0 task) + # Microsoft hasn't added Python 3.14 to the standard Python registry yet + - powershell: | + $pythonVer = "${{ parameters.pythonVersion }}" + + if ($pythonVer -eq "3.14") { + Write-Host "Python 3.14 detected - downloading from NuGet..." + + # Download Python 3.14 x64 from NuGet (stable release) + $nugetUrl = "https://www.nuget.org/api/v2/package/python/3.14.0" + $nugetFile = "$(Build.SourcesDirectory)\python-x64.nupkg" + $zipFile = "$(Build.SourcesDirectory)\python-x64.zip" + $extractPath = "C:\Python314-NuGet" + + Write-Host "Downloading Python 3.14 x64 from: $nugetUrl" + Invoke-WebRequest -Uri $nugetUrl -OutFile $nugetFile -UseBasicParsing + + Write-Host "Extracting NuGet package..." 
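+        # A .nupkg is a ZIP archive; renaming it to .zip lets Expand-Archive unpack it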
+ Move-Item -Path $nugetFile -Destination $zipFile -Force + Expand-Archive -Path $zipFile -DestinationPath $extractPath -Force + + # Python executable is in tools directory + $pythonDir = "$extractPath\tools" + + Write-Host "Setting up Python at: $pythonDir" + + # Create C:\Python314 for consistent paths + New-Item -ItemType Directory -Force -Path "C:\Python314" | Out-Null + Copy-Item -Path "$pythonDir\*" -Destination "C:\Python314" -Recurse -Force + + Write-Host "`nVerifying Python installation:" + & "C:\Python314\python.exe" --version + & "C:\Python314\python.exe" -c "import sys; print('Python:', sys.executable)" + + # Add to PATH + Write-Host "##vso[task.prependpath]C:\Python314" + Write-Host "##vso[task.prependpath]C:\Python314\Scripts" + + # Cleanup + Remove-Item -Path $zipFile -Force -ErrorAction SilentlyContinue + Remove-Item -Path $nugetFile -Force -ErrorAction SilentlyContinue + } + condition: eq('${{ parameters.pythonVersion }}', '3.14') + displayName: 'Download and install Python 3.14 from NuGet' + + # Python 3.10-3.13: Use standard Azure Pipelines task + # UsePythonVersion@0 supports these versions natively - task: UsePythonVersion@0 inputs: versionSpec: '${{ parameters.pythonVersion }}' architecture: 'x64' addToPath: true + condition: ne('${{ parameters.pythonVersion }}', '3.14') displayName: 'Use Python ${{ parameters.pythonVersion }} (${{ parameters.architecture }})' continueOnError: false @@ -75,6 +134,8 @@ stages: Write-Host "Dependencies installed successfully" displayName: 'Install Python dependencies' + # Start SQL Server LocalDB for pytest execution + # LocalDB is a lightweight SQL Server instance pre-installed on WIN22-SQL22 agents - powershell: | sqllocaldb create MSSQLLocalDB sqllocaldb start MSSQLLocalDB @@ -89,6 +150,8 @@ stages: env: DB_PASSWORD: $(DB_PASSWORD) + # Download ARM64 Python libraries for cross-compilation (ARM64 builds only) + # ARM64 wheels must be built on x64 host using ARM64 python.lib - powershell: | # Download Python ARM64 from NuGet (contains libs directory with python.lib) $pythonVer = "${{ parameters.pythonVersion }}" @@ -99,8 +162,7 @@ stages: "3.11" { "3.11.9" } "3.12" { "3.12.7" } "3.13" { "3.13.0" } - "3.14" { "3.14.0-a2" } - default { throw "Unsupported Python version: $pythonVer" } + "3.14" { "3.14.0" } } $nugetUrl = "https://www.nuget.org/api/v2/package/pythonarm64/$nugetVersion" @@ -153,6 +215,8 @@ stages: condition: eq(variables['targetArch'], 'arm64') displayName: 'Download Python ARM64 libs from NuGet' + # Build native Python extension (.pyd) using MSVC and CMake + # For ARM64: Uses CUSTOM_PYTHON_LIB_DIR to link against ARM64 python.lib - script: | echo "Python Version: $(pythonVersion)" echo "Short Tag: $(shortPyVer)" @@ -173,6 +237,11 @@ stages: displayName: 'Build PYD for $(targetArch)' continueOnError: false + # ========================= + # TESTING + # ========================= + # Run pytest to validate bindings (x64 only) + # ARM64 binaries cannot execute on x64 host, so tests are skipped - powershell: | Write-Host "Running pytests to validate bindings" if ("$(targetArch)" -eq "arm64") { @@ -184,6 +253,8 @@ stages: env: DB_CONNECTION_STRING: 'Server=(localdb)\MSSQLLocalDB;Database=TestDB;Uid=testuser;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes' + # Copy artifacts to OneBranch output directory for publishing + # PYD files: Native Python extensions (ddbc_bindings.cpXXX-win_xxx.pyd) - task: CopyFiles@2 inputs: SourceFolder: '$(Build.SourcesDirectory)\mssql_python\pybind\build\$(targetArch)\py$(shortPyVer)\Release' @@ -191,6 
+262,7 @@ stages: TargetFolder: '$(ob_outputDirectory)\bindings\windows' displayName: 'Copy PYD files' + # PDB files: Debugging symbols for native code - task: CopyFiles@2 inputs: SourceFolder: '$(Build.SourcesDirectory)\mssql_python\pybind\build\$(targetArch)\py$(shortPyVer)\Release' @@ -198,7 +270,8 @@ stages: TargetFolder: '$(ob_outputDirectory)\symbols' displayName: 'Copy PDB files' - # Copy files to ApiScan directories (for globalSdl scanning) + # Copy PYD to ApiScan directory for SDL security scanning + # BinSkim and other tools scan binaries from this location - task: CopyFiles@2 inputs: SourceFolder: '$(Build.SourcesDirectory)\mssql_python\pybind\build\$(targetArch)\py$(shortPyVer)\Release' @@ -213,13 +286,18 @@ stages: TargetFolder: '$(Build.SourcesDirectory)\apiScan\pdbs\windows\py$(shortPyVer)\$(targetArch)' displayName: 'Copy PDB to ApiScan directory' - + # Build Python wheel package from source distribution + # ARCHITECTURE environment variable controls target platform tagging - script: | python -m pip install --upgrade pip wheel setuptools set ARCHITECTURE=$(targetArch) python setup.py bdist_wheel displayName: 'Build wheel package' + # ========================= + # ARTIFACT PUBLISHING + # ========================= + # Copy wheel to OneBranch output directory - task: CopyFiles@2 inputs: SourceFolder: '$(Build.SourcesDirectory)\dist' @@ -227,7 +305,8 @@ stages: TargetFolder: '$(ob_outputDirectory)\wheels' displayName: 'Copy wheel files' - # Explicit publish with OneBranch-compliant artifact name + # Publish artifacts to Azure Pipelines for downstream consumption + # OneBranch requires specific artifact naming: drop__ - task: PublishPipelineArtifact@1 displayName: 'Publish Windows Artifacts' inputs: @@ -235,16 +314,20 @@ stages: artifact: 'drop_${{ parameters.stageName }}_${{ parameters.jobName }}' publishLocation: 'pipeline' - # General malware scanning (Component Governance + OneBranch AntiMalware) + # Security Scanning: Component Governance + OneBranch AntiMalware + # Scans PYD files and wheels for known vulnerabilities and malware signatures - template: /OneBranchPipelines/steps/malware-scanning-step.yml@self parameters: scanPath: '$(ob_outputDirectory)' artifactType: 'dll' - # ESRP Malware scanning (when signing is enabled) - - ${{ if eq(parameters.signingEnabled, true) }}: + # ESRP Malware Scanning (Official Builds Only) + # ESRP = Microsoft's Enterprise Signing and Release Platform + # Scans wheel files for malware using Microsoft Defender and custom signatures + # Only runs for Official builds (production compliance requirement) + - ${{ if eq(parameters.oneBranchType, 'Official') }}: - task: EsrpMalwareScanning@5 - displayName: 'ESRP MalwareScanning - Python Wheels' + displayName: 'ESRP MalwareScanning - Python Wheels (Official)' inputs: ConnectedServiceName: '$(SigningEsrpConnectedServiceName)' AppRegistrationClientId: '$(SigningAppRegistrationClientId)' @@ -259,7 +342,7 @@ stages: # ESRP Code Signing (DISABLED - wheel files cannot be signed with SignTool) # See compound-esrp-code-signing-step.yml for detailed explanation of why this doesn't work - # - ${{ if eq(parameters.signingEnabled, true) }}: + # - ${{ if eq(parameters.oneBranchType, 'Official') }}: # - template: /OneBranchPipelines/steps/compound-esrp-code-signing-step.yml@self # parameters: # appRegistrationClientId: '$(SigningAppRegistrationClientId)' diff --git a/tests/test_000_dependencies.py b/tests/test_000_dependencies.py index f5339e2d..c558f1e6 100644 --- a/tests/test_000_dependencies.py +++ 
b/tests/test_000_dependencies.py @@ -562,8 +562,10 @@ def test_ddbc_bindings_extension_detection(): # We can verify this by checking what the module import system expects # The extension detection logic is used during import import os + import mssql_python - module_dir = os.path.dirname(__file__).replace("tests", "mssql_python") + # Get the actual installed module directory + module_dir = os.path.dirname(mssql_python.__file__) # Check that some ddbc_bindings file exists with the expected extension ddbc_files = [