diff --git a/.config/CredScanSuppressions.json b/.config/CredScanSuppressions.json
new file mode 100644
index 00000000..ad131493
--- /dev/null
+++ b/.config/CredScanSuppressions.json
@@ -0,0 +1,21 @@
+{
+ "tool": "Credential Scanner",
+ "suppressions": [
+ {
+ "file": "tests/*",
+ "_justification": "Test projects contain sample credentials and should be skipped"
+ },
+ {
+ "file": "benchmarks/*",
+ "_justification": "Benchmark code may include test connection strings"
+ },
+ {
+ "file": "eng/*",
+ "_justification": "Engineering and pipeline configuration files"
+ },
+ {
+ "file": "OneBranchPipelines/*",
+ "_justification": "OneBranch pipeline configuration files"
+ }
+ ]
+}
diff --git a/.config/PolicheckExclusions.xml b/.config/PolicheckExclusions.xml
new file mode 100644
index 00000000..a08b7514
--- /dev/null
+++ b/.config/PolicheckExclusions.xml
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="utf-8"?>
+<PoliCheckExclusions>
+ <!-- NOTE(review): XML tags appear to have been stripped from this hunk; markup reconstructed. Folders excluded from PoliCheck scans (pipe-separated). -->
+ <Exclusion Type="FolderPathFull">tests|benchmarks|eng|OneBranchPipelines|examples|docs|build-artifacts|dist|__pycache__|myvenv|testenv</Exclusion>
+
+
+
+ <!-- Files excluded from PoliCheck scans (pipe-separated) -->
+ <Exclusion Type="FileName">CHANGELOG.md|README.md|LICENSE|NOTICE.txt|ROADMAP.md|CODE_OF_CONDUCT.md|CONTRIBUTING.md|SECURITY.md|SUPPORT.md</Exclusion>
+
+</PoliCheckExclusions>
\ No newline at end of file
diff --git a/.config/tsaoptions.json b/.config/tsaoptions.json
new file mode 100644
index 00000000..4fbaf755
--- /dev/null
+++ b/.config/tsaoptions.json
@@ -0,0 +1,14 @@
+{
+ "instanceUrl": "https://sqlclientdrivers.visualstudio.com",
+ "projectName": "mssql-python",
+ "areaPath": "mssql-python",
+ "iterationPath": "mssql-python",
+ "notificationAliases": [
+ "mssql-python@microsoft.com"
+ ],
+ "repositoryName": "mssql-python",
+ "codebaseName": "Microsoft Python Driver for SQL Server",
+ "allTools": true,
+ "includePathPatterns": "mssql_python/*, setup.py, requirements.txt",
+ "excludePathPatterns": "tests/*, benchmarks/*, eng/*, OneBranchPipelines/*"
+}
diff --git a/.gdn/.gdnbaselines b/.gdn/.gdnbaselines
new file mode 100644
index 00000000..72cabd47
--- /dev/null
+++ b/.gdn/.gdnbaselines
@@ -0,0 +1,300 @@
+{
+ "hydrated": false,
+ "properties": {
+ "helpUri": "https://eng.ms/docs/microsoft-security/security/azure-security/cloudai-security-fundamentals-engineering/security-integration/guardian-wiki/microsoft-guardian/general/baselines"
+ },
+ "version": "1.0.0",
+ "baselines": {
+ "default": {
+ "name": "default",
+ "createdDate": "2025-11-10 15:00:51Z",
+ "lastUpdatedDate": "2025-11-10 15:00:51Z"
+ }
+ },
+ "results": {
+ "aade958c0f923536ba575ebaaf1ce15a85f6c45b73e7785c2c15fb5a2c94408e": {
+ "signature": "aade958c0f923536ba575ebaaf1ce15a85f6c45b73e7785c2c15fb5a2c94408e",
+ "alternativeSignatures": [
+ "c59f521d29345c75983ad0e494c2e55e3a4c41ac35b7163da488a9f78c864f63"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "a7d351fb49883535cfb307e2a4f77636ae5e54a94af99406f96d2558bd643edc": {
+ "signature": "a7d351fb49883535cfb307e2a4f77636ae5e54a94af99406f96d2558bd643edc",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "1ba31ce1ab7a0b18ae9e504ad24c48f235eab0e6dcb3ad960a7a89b9c48b077a": {
+ "signature": "1ba31ce1ab7a0b18ae9e504ad24c48f235eab0e6dcb3ad960a7a89b9c48b077a",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "f7e51f21d47b749dd39359b75955ad1c0cf382c0a78426bcb31539bc0a88374b": {
+ "signature": "f7e51f21d47b749dd39359b75955ad1c0cf382c0a78426bcb31539bc0a88374b",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "57bee1c81911d2ba66861c1deebf33ec0ec5fa5d946666748017493ead017d53": {
+ "signature": "57bee1c81911d2ba66861c1deebf33ec0ec5fa5d946666748017493ead017d53",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "278585c30d0968e80928c1d86455aa32481e0b97b0cdbba1f20073e70398a0b8": {
+ "signature": "278585c30d0968e80928c1d86455aa32481e0b97b0cdbba1f20073e70398a0b8",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "974a35997c6b2cdbb802ee711e2265e93f2f788f7ab976c05fbf7894e9248855": {
+ "signature": "974a35997c6b2cdbb802ee711e2265e93f2f788f7ab976c05fbf7894e9248855",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "6064d60cf011d4ef6771441256423be8099dafb8d1f11cc066365115c18f51ab": {
+ "signature": "6064d60cf011d4ef6771441256423be8099dafb8d1f11cc066365115c18f51ab",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "6b32b6a40b729abe443c04556b5a1c8fdcbbd27f1b6ae1d0d44ac75fa0dd38d5": {
+ "signature": "6b32b6a40b729abe443c04556b5a1c8fdcbbd27f1b6ae1d0d44ac75fa0dd38d5",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "8ef0a26f4366de0ec76cc6e929cceae58295937b3dce9d31471657091c9c9986": {
+ "signature": "8ef0a26f4366de0ec76cc6e929cceae58295937b3dce9d31471657091c9c9986",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "f1fa10a58cac2aca8946aba45e4a1d10f8ef6b86b433ed49b58910d3205149cc": {
+ "signature": "f1fa10a58cac2aca8946aba45e4a1d10f8ef6b86b433ed49b58910d3205149cc",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "39c0c5997e05cc2c4bbd182acf975698088e87d358e196008147ffafde9f43e2": {
+ "signature": "39c0c5997e05cc2c4bbd182acf975698088e87d358e196008147ffafde9f43e2",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "097d40852758d2660cdc7865c1b9cb638ec9165685773916e960efca725bb6cd": {
+ "signature": "097d40852758d2660cdc7865c1b9cb638ec9165685773916e960efca725bb6cd",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "9f83def0a176d3aa7dc47f6443ab546ba717e2b16a552e229784b171a18e55f5": {
+ "signature": "9f83def0a176d3aa7dc47f6443ab546ba717e2b16a552e229784b171a18e55f5",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "86966d5f6215bf5ae8c1b4d894caa6b69cc678374ab7a2321695dca35fc55923": {
+ "signature": "86966d5f6215bf5ae8c1b4d894caa6b69cc678374ab7a2321695dca35fc55923",
+ "alternativeSignatures": [
+ "4c8f75669e65355d034fcd3be56ebf462134e0ff2fec2605d04bccdb36e68111"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "d07377aee65d4515741765e830ea055dfe6df987f8f2f6399dfff1b6928115f5": {
+ "signature": "d07377aee65d4515741765e830ea055dfe6df987f8f2f6399dfff1b6928115f5",
+ "alternativeSignatures": [
+ "c0bcaaad531041aae4bc6bd88f452c845de3fb2b3825ab9b7ed1282cf2c548dd"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "bb10304b655f6008876c0671e0e8c73a858fc040867f340464dfc479cd9c3ba9": {
+ "signature": "bb10304b655f6008876c0671e0e8c73a858fc040867f340464dfc479cd9c3ba9",
+ "alternativeSignatures": [
+ "ee06cd1fcac7607b9f9103d3572ae7468bb3c43350639c2798a91017851442ed"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "7df253f960bd38300d111d29e106cd8c4fbdcb1d9e1420b8f8b5efa702cc0d6b": {
+ "signature": "7df253f960bd38300d111d29e106cd8c4fbdcb1d9e1420b8f8b5efa702cc0d6b",
+ "alternativeSignatures": [
+ "9f54994c0e212ec81244442d324a11d5bc2b20233eeef67e251767186fd0743e"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "bd9c1992728d9d1798329af6f6dc8ae44d7058a7d8f15b9001c009200ec0aaa3": {
+ "signature": "bd9c1992728d9d1798329af6f6dc8ae44d7058a7d8f15b9001c009200ec0aaa3",
+ "alternativeSignatures": [
+ "1bb6c80c485a4385f09c8fe2ecd7f65b310fcbbc9987456db0c9372f2f9c479d"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "e8040349a51b39e6f9eb478d16128184865096ad79e35f1687e8f36bce9d0021": {
+ "signature": "e8040349a51b39e6f9eb478d16128184865096ad79e35f1687e8f36bce9d0021",
+ "alternativeSignatures": [
+ "7ac989754684da6e6398df0fa8e9b38e63d43f536098574e98f8d82f987c9e64"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "99dbea9de7468dde3ab131a4c21f572fc19ff010730062451187de094abe9413": {
+ "signature": "99dbea9de7468dde3ab131a4c21f572fc19ff010730062451187de094abe9413",
+ "alternativeSignatures": [
+ "924682483adec7d5d020422beaa8a703b2070d04e0b368a6c1c9fb33f4c0f386"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "f15c06eb6496f3eec4ecd667ae96476d7280d3691bee142a9e023b21d184cb7f": {
+ "signature": "f15c06eb6496f3eec4ecd667ae96476d7280d3691bee142a9e023b21d184cb7f",
+ "alternativeSignatures": [
+ "a5b6768732ae9dcb3c8619af98639a1442cf53e8980716d861c40a14d40bcfef"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "3e5ff8a2e08d5e9a25ccaa7911b8cc758248fcc23ed7ff01d8f833294b2425dd": {
+ "signature": "3e5ff8a2e08d5e9a25ccaa7911b8cc758248fcc23ed7ff01d8f833294b2425dd",
+ "alternativeSignatures": [
+ "36b8101496f546de6416a5978c611cc5fe309f40977bf78652d73b41b2975ea5"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "5e1c753e18bd472af64c82c71aee0dc83d0ddcb3a897522d120b707b56d47401": {
+ "signature": "5e1c753e18bd472af64c82c71aee0dc83d0ddcb3a897522d120b707b56d47401",
+ "alternativeSignatures": [
+ "099fe23e23d95c8f957773101e24a53187e6cf67ccd2ae3944e65fddf95cf3c2"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "8636faecde898cdc690b9804ed240276ea631134588b99be21a509c3bcf8f5c6": {
+ "signature": "8636faecde898cdc690b9804ed240276ea631134588b99be21a509c3bcf8f5c6",
+ "alternativeSignatures": [
+ "3d4b23500b78a0f0c4365d5fe9dc9773b07a653b6154bc2ec6e3df1147058e9f"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "83bd28e26677f06338e89530f916ac93bf0760f1ce328f1c3dd407863a74ad27": {
+ "signature": "83bd28e26677f06338e89530f916ac93bf0760f1ce328f1c3dd407863a74ad27",
+ "alternativeSignatures": [
+ "bf49ba09d629e0b78e7d4ee56afc7347a7ba0cb727fed893f53f09be4466ebb5"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "5808b18c90fbe2874ded2e82d381b7fe425a5f472c4f123559923319de9adf44": {
+ "signature": "5808b18c90fbe2874ded2e82d381b7fe425a5f472c4f123559923319de9adf44",
+ "alternativeSignatures": [
+ "0cc5b7885e75304a9951f4b22666fcafbfe5aafba268c6bcfdada2ef4b35bcfc"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "b4280c9ec7953fca7e333ae67821bb25616127bcaad96bb449fe2a777a2a754b": {
+ "signature": "b4280c9ec7953fca7e333ae67821bb25616127bcaad96bb449fe2a777a2a754b",
+ "alternativeSignatures": [
+ "0a6d7dc7d76c5ec589cdceaba4bce1c6c7c1b54582900f305a5f35bfb606ca3e"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "9ebd52ffe5444d94809a5aaddfd754d8bce0085910516171b226a630f71a2cf6": {
+ "signature": "9ebd52ffe5444d94809a5aaddfd754d8bce0085910516171b226a630f71a2cf6",
+ "alternativeSignatures": [
+ "3b2519103c3722c7c8a7fb8c639a57ebb6884441638f7a9cdcb49d788987b902"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "9a0821feaabde36ea784d6caad810916d21e950c4745162e04994faa5774fa3f": {
+ "signature": "9a0821feaabde36ea784d6caad810916d21e950c4745162e04994faa5774fa3f",
+ "alternativeSignatures": [
+ "5ee6cebbc49bb7e376d0776ea55cf64f16bf3006e82048ccb7b6bcc174bd88b4"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "42007d4363dd45ea940c7a3dc4e76c13644982eb9d5879d89e7d6d79285b4be9": {
+ "signature": "42007d4363dd45ea940c7a3dc4e76c13644982eb9d5879d89e7d6d79285b4be9",
+ "alternativeSignatures": [
+ "a6571b410651c2e09642232ecb65d8212dd7106cd268c5a90d5e5a4e61ff178f"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ }
+ }
+}
\ No newline at end of file
diff --git a/.gdn/.gdnsuppress b/.gdn/.gdnsuppress
new file mode 100644
index 00000000..3a9012f0
--- /dev/null
+++ b/.gdn/.gdnsuppress
@@ -0,0 +1,300 @@
+{
+ "hydrated": false,
+ "properties": {
+ "helpUri": "https://eng.ms/docs/microsoft-security/security/azure-security/cloudai-security-fundamentals-engineering/security-integration/guardian-wiki/microsoft-guardian/general/suppressions"
+ },
+ "version": "1.0.0",
+ "suppressionSets": {
+ "default": {
+ "name": "default",
+ "createdDate": "2025-11-10 15:00:51Z",
+ "lastUpdatedDate": "2025-11-10 15:00:51Z"
+ }
+ },
+ "results": {
+ "aade958c0f923536ba575ebaaf1ce15a85f6c45b73e7785c2c15fb5a2c94408e": {
+ "signature": "aade958c0f923536ba575ebaaf1ce15a85f6c45b73e7785c2c15fb5a2c94408e",
+ "alternativeSignatures": [
+ "c59f521d29345c75983ad0e494c2e55e3a4c41ac35b7163da488a9f78c864f63"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "a7d351fb49883535cfb307e2a4f77636ae5e54a94af99406f96d2558bd643edc": {
+ "signature": "a7d351fb49883535cfb307e2a4f77636ae5e54a94af99406f96d2558bd643edc",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "1ba31ce1ab7a0b18ae9e504ad24c48f235eab0e6dcb3ad960a7a89b9c48b077a": {
+ "signature": "1ba31ce1ab7a0b18ae9e504ad24c48f235eab0e6dcb3ad960a7a89b9c48b077a",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "f7e51f21d47b749dd39359b75955ad1c0cf382c0a78426bcb31539bc0a88374b": {
+ "signature": "f7e51f21d47b749dd39359b75955ad1c0cf382c0a78426bcb31539bc0a88374b",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "57bee1c81911d2ba66861c1deebf33ec0ec5fa5d946666748017493ead017d53": {
+ "signature": "57bee1c81911d2ba66861c1deebf33ec0ec5fa5d946666748017493ead017d53",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "278585c30d0968e80928c1d86455aa32481e0b97b0cdbba1f20073e70398a0b8": {
+ "signature": "278585c30d0968e80928c1d86455aa32481e0b97b0cdbba1f20073e70398a0b8",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "974a35997c6b2cdbb802ee711e2265e93f2f788f7ab976c05fbf7894e9248855": {
+ "signature": "974a35997c6b2cdbb802ee711e2265e93f2f788f7ab976c05fbf7894e9248855",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "6064d60cf011d4ef6771441256423be8099dafb8d1f11cc066365115c18f51ab": {
+ "signature": "6064d60cf011d4ef6771441256423be8099dafb8d1f11cc066365115c18f51ab",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "6b32b6a40b729abe443c04556b5a1c8fdcbbd27f1b6ae1d0d44ac75fa0dd38d5": {
+ "signature": "6b32b6a40b729abe443c04556b5a1c8fdcbbd27f1b6ae1d0d44ac75fa0dd38d5",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "8ef0a26f4366de0ec76cc6e929cceae58295937b3dce9d31471657091c9c9986": {
+ "signature": "8ef0a26f4366de0ec76cc6e929cceae58295937b3dce9d31471657091c9c9986",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "f1fa10a58cac2aca8946aba45e4a1d10f8ef6b86b433ed49b58910d3205149cc": {
+ "signature": "f1fa10a58cac2aca8946aba45e4a1d10f8ef6b86b433ed49b58910d3205149cc",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "39c0c5997e05cc2c4bbd182acf975698088e87d358e196008147ffafde9f43e2": {
+ "signature": "39c0c5997e05cc2c4bbd182acf975698088e87d358e196008147ffafde9f43e2",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "097d40852758d2660cdc7865c1b9cb638ec9165685773916e960efca725bb6cd": {
+ "signature": "097d40852758d2660cdc7865c1b9cb638ec9165685773916e960efca725bb6cd",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "9f83def0a176d3aa7dc47f6443ab546ba717e2b16a552e229784b171a18e55f5": {
+ "signature": "9f83def0a176d3aa7dc47f6443ab546ba717e2b16a552e229784b171a18e55f5",
+ "alternativeSignatures": [],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "86966d5f6215bf5ae8c1b4d894caa6b69cc678374ab7a2321695dca35fc55923": {
+ "signature": "86966d5f6215bf5ae8c1b4d894caa6b69cc678374ab7a2321695dca35fc55923",
+ "alternativeSignatures": [
+ "4c8f75669e65355d034fcd3be56ebf462134e0ff2fec2605d04bccdb36e68111"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "d07377aee65d4515741765e830ea055dfe6df987f8f2f6399dfff1b6928115f5": {
+ "signature": "d07377aee65d4515741765e830ea055dfe6df987f8f2f6399dfff1b6928115f5",
+ "alternativeSignatures": [
+ "c0bcaaad531041aae4bc6bd88f452c845de3fb2b3825ab9b7ed1282cf2c548dd"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "bb10304b655f6008876c0671e0e8c73a858fc040867f340464dfc479cd9c3ba9": {
+ "signature": "bb10304b655f6008876c0671e0e8c73a858fc040867f340464dfc479cd9c3ba9",
+ "alternativeSignatures": [
+ "ee06cd1fcac7607b9f9103d3572ae7468bb3c43350639c2798a91017851442ed"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "7df253f960bd38300d111d29e106cd8c4fbdcb1d9e1420b8f8b5efa702cc0d6b": {
+ "signature": "7df253f960bd38300d111d29e106cd8c4fbdcb1d9e1420b8f8b5efa702cc0d6b",
+ "alternativeSignatures": [
+ "9f54994c0e212ec81244442d324a11d5bc2b20233eeef67e251767186fd0743e"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "bd9c1992728d9d1798329af6f6dc8ae44d7058a7d8f15b9001c009200ec0aaa3": {
+ "signature": "bd9c1992728d9d1798329af6f6dc8ae44d7058a7d8f15b9001c009200ec0aaa3",
+ "alternativeSignatures": [
+ "1bb6c80c485a4385f09c8fe2ecd7f65b310fcbbc9987456db0c9372f2f9c479d"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "e8040349a51b39e6f9eb478d16128184865096ad79e35f1687e8f36bce9d0021": {
+ "signature": "e8040349a51b39e6f9eb478d16128184865096ad79e35f1687e8f36bce9d0021",
+ "alternativeSignatures": [
+ "7ac989754684da6e6398df0fa8e9b38e63d43f536098574e98f8d82f987c9e64"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "99dbea9de7468dde3ab131a4c21f572fc19ff010730062451187de094abe9413": {
+ "signature": "99dbea9de7468dde3ab131a4c21f572fc19ff010730062451187de094abe9413",
+ "alternativeSignatures": [
+ "924682483adec7d5d020422beaa8a703b2070d04e0b368a6c1c9fb33f4c0f386"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "f15c06eb6496f3eec4ecd667ae96476d7280d3691bee142a9e023b21d184cb7f": {
+ "signature": "f15c06eb6496f3eec4ecd667ae96476d7280d3691bee142a9e023b21d184cb7f",
+ "alternativeSignatures": [
+ "a5b6768732ae9dcb3c8619af98639a1442cf53e8980716d861c40a14d40bcfef"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "3e5ff8a2e08d5e9a25ccaa7911b8cc758248fcc23ed7ff01d8f833294b2425dd": {
+ "signature": "3e5ff8a2e08d5e9a25ccaa7911b8cc758248fcc23ed7ff01d8f833294b2425dd",
+ "alternativeSignatures": [
+ "36b8101496f546de6416a5978c611cc5fe309f40977bf78652d73b41b2975ea5"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "5e1c753e18bd472af64c82c71aee0dc83d0ddcb3a897522d120b707b56d47401": {
+ "signature": "5e1c753e18bd472af64c82c71aee0dc83d0ddcb3a897522d120b707b56d47401",
+ "alternativeSignatures": [
+ "099fe23e23d95c8f957773101e24a53187e6cf67ccd2ae3944e65fddf95cf3c2"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "8636faecde898cdc690b9804ed240276ea631134588b99be21a509c3bcf8f5c6": {
+ "signature": "8636faecde898cdc690b9804ed240276ea631134588b99be21a509c3bcf8f5c6",
+ "alternativeSignatures": [
+ "3d4b23500b78a0f0c4365d5fe9dc9773b07a653b6154bc2ec6e3df1147058e9f"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "83bd28e26677f06338e89530f916ac93bf0760f1ce328f1c3dd407863a74ad27": {
+ "signature": "83bd28e26677f06338e89530f916ac93bf0760f1ce328f1c3dd407863a74ad27",
+ "alternativeSignatures": [
+ "bf49ba09d629e0b78e7d4ee56afc7347a7ba0cb727fed893f53f09be4466ebb5"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "5808b18c90fbe2874ded2e82d381b7fe425a5f472c4f123559923319de9adf44": {
+ "signature": "5808b18c90fbe2874ded2e82d381b7fe425a5f472c4f123559923319de9adf44",
+ "alternativeSignatures": [
+ "0cc5b7885e75304a9951f4b22666fcafbfe5aafba268c6bcfdada2ef4b35bcfc"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "b4280c9ec7953fca7e333ae67821bb25616127bcaad96bb449fe2a777a2a754b": {
+ "signature": "b4280c9ec7953fca7e333ae67821bb25616127bcaad96bb449fe2a777a2a754b",
+ "alternativeSignatures": [
+ "0a6d7dc7d76c5ec589cdceaba4bce1c6c7c1b54582900f305a5f35bfb606ca3e"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "9ebd52ffe5444d94809a5aaddfd754d8bce0085910516171b226a630f71a2cf6": {
+ "signature": "9ebd52ffe5444d94809a5aaddfd754d8bce0085910516171b226a630f71a2cf6",
+ "alternativeSignatures": [
+ "3b2519103c3722c7c8a7fb8c639a57ebb6884441638f7a9cdcb49d788987b902"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "9a0821feaabde36ea784d6caad810916d21e950c4745162e04994faa5774fa3f": {
+ "signature": "9a0821feaabde36ea784d6caad810916d21e950c4745162e04994faa5774fa3f",
+ "alternativeSignatures": [
+ "5ee6cebbc49bb7e376d0776ea55cf64f16bf3006e82048ccb7b6bcc174bd88b4"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ },
+ "42007d4363dd45ea940c7a3dc4e76c13644982eb9d5879d89e7d6d79285b4be9": {
+ "signature": "42007d4363dd45ea940c7a3dc4e76c13644982eb9d5879d89e7d6d79285b4be9",
+ "alternativeSignatures": [
+ "a6571b410651c2e09642232ecb65d8212dd7106cd268c5a90d5e5a4e61ff178f"
+ ],
+ "memberOf": [
+ "default"
+ ],
+ "createdDate": "2025-11-10 15:00:51Z"
+ }
+ }
+}
\ No newline at end of file
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 836a0a79..4288fcb5 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -39,4 +39,4 @@ All pull requests must include:
- **Meaningful Summary**: Include a clear description of your changes under the "### Summary" section in the PR description (minimum 10 characters)
- **Issue/Work Item Link** (only one required):
- External contributors: Link to a GitHub issue
- - Microsoft org members: Link to an ADO work item
\ No newline at end of file
+ - Microsoft org members: Link to an ADO work item
diff --git a/OneBranchPipelines/build-release-package-pipeline.yml b/OneBranchPipelines/build-release-package-pipeline.yml
new file mode 100644
index 00000000..6567e7b0
--- /dev/null
+++ b/OneBranchPipelines/build-release-package-pipeline.yml
@@ -0,0 +1,291 @@
+# OneBranch Pipeline for mssql-python
+# Builds Python wheels for Windows, macOS, and Linux with security compliance
+
+name: $(Year:YY)$(DayOfYear)$(Rev:.r)
+
+# Pipeline triggers
+trigger:
+ branches:
+ include:
+ - main
+
+pr:
+ branches:
+ include:
+ - main
+
+# Schedule the pipeline to run on main branch daily at 07:00 AM IST
+schedules:
+ - cron: "30 1 * * *"
+ displayName: Daily run at 07:00 AM IST
+ branches:
+ include:
+ - main
+ always: true
+
+# Parameters for pipeline behavior
+parameters:
+ - name: oneBranchType
+ displayName: 'OneBranch Template Type'
+ type: string
+ values:
+ - 'Official'
+ - 'NonOfficial'
+ default: 'NonOfficial'
+
+ - name: buildConfiguration
+ displayName: 'Build Configuration'
+ type: string
+ values:
+ - 'Release'
+ - 'Debug'
+ default: 'Release'
+
+ - name: runSdlTasks
+ displayName: 'Run SDL Security Tasks'
+ type: boolean
+ default: true
+
+ - name: signingEnabled
+ displayName: 'Enable Code Signing (ESRP)'
+ type: boolean
+ default: true
+
+ - name: packageVersion
+ displayName: 'Package Version (e.g., 0.13.0)'
+ type: string
+ default: '0.13.0'
+
+ # Configuration matrices for each platform
+ - name: windowsConfigs
+ type: object
+ default:
+ - pyVer: '310'
+ arch: 'x64'
+ - pyVer: '311'
+ arch: 'x64'
+ - pyVer: '312'
+ arch: 'x64'
+ - pyVer: '313'
+ arch: 'x64'
+ - pyVer: '311'
+ arch: 'arm64'
+ - pyVer: '312'
+ arch: 'arm64'
+ - pyVer: '313'
+ arch: 'arm64'
+
+ - name: macosConfigs
+ type: object
+ default:
+ - pyVer: '310'
+ - pyVer: '311'
+ - pyVer: '312'
+ - pyVer: '313'
+
+ - name: linuxConfigs
+ type: object
+ default:
+ - { tag: 'manylinux', arch: 'x86_64', platform: 'linux/amd64' }
+ - { tag: 'manylinux', arch: 'aarch64', platform: 'linux/arm64' }
+ - { tag: 'musllinux', arch: 'x86_64', platform: 'linux/amd64' }
+ - { tag: 'musllinux', arch: 'aarch64', platform: 'linux/arm64' }
+
+# Variable templates
+variables:
+ # Set package version from parameter
+ - name: PACKAGE_VERSION
+ value: '${{ parameters.packageVersion }}'
+ readonly: true
+ # NOTE(review): removed a duplicated PACKAGE_VERSION declaration that was an
+ # exact copy of the block above. Azure Pipelines resolves duplicate variable
+ # names to the last declaration, so the repeated block was redundant
+ # copy/paste and could mislead readers about which value takes effect.
+
+ # Alias for SDL tools (compile-time)
+ - name: packageVersion
+ value: '${{ parameters.packageVersion }}'
+ readonly: true
+
+ - template: /OneBranchPipelines/variables/common-variables.yml@self
+ - template: /OneBranchPipelines/variables/onebranch-variables.yml@self
+ - template: /OneBranchPipelines/variables/build-variables.yml@self
+ - template: /OneBranchPipelines/variables/signing-variables.yml@self
+ - template: /OneBranchPipelines/variables/symbol-variables.yml@self
+ # Variable groups
+ - group: 'ESRP Federated Creds (AME)' # Contains ESRP signing credentials
+
+# OneBranch resources
+resources:
+ repositories:
+ - repository: templates
+ type: git
+ name: 'OneBranch.Pipelines/GovernedTemplates'
+ ref: 'refs/heads/main'
+
+# Extend OneBranch official template
+extends:
+ template: 'v2/OneBranch.${{ parameters.oneBranchType }}.CrossPlat.yml@templates'
+
+ parameters:
+ # Container definitions
+ # Note: All jobs use custom 1ES pools or Microsoft-hosted agents
+ # Windows: Django-1ES-pool + WIN22-SQL22 (custom 1ES)
+ # Linux: Django-1ES-pool + ADO-UB22-SQL22 (custom 1ES)
+ # macOS: Azure Pipelines + macOS-14 (Microsoft-hosted)
+
+ # Feature flags
+ featureFlags:
+ WindowsHostVersion:
+ Version: '2022'
+ binskimScanAllExtensions: true # Enable scanning of all supported file types including .pyd
+
+ # Global SDL Configuration
+ # See: https://aka.ms/obpipelines/sdl
+ globalSdl:
+ # Global Guardian baseline and suppression files
+ baseline:
+ baselineFile: $(Build.SourcesDirectory)\.gdn\.gdnbaselines
+ suppressionSet: default
+ suppression:
+ suppressionFile: $(Build.SourcesDirectory)\.gdn\.gdnsuppress
+ suppressionSet: default
+
+ # ApiScan - Scans APIs for security vulnerabilities
+ # Disabled: Not applicable to Python wheel distribution model
+ apiscan:
+ enabled: false
+ justificationForDisabling: 'APIScan requires PDB symbols for native Windows DLLs. Python wheels primarily contain .pyd files and Python code, better covered by BinSkim. JDBC team also has APIScan disabled for similar reasons.'
+
+ # Armory - Security scanning for binaries
+ armory:
+ enabled: ${{ parameters.runSdlTasks }}
+ break: true
+
+ # AsyncSdl - Asynchronous SDL tasks
+ asyncSdl:
+ enabled: false
+
+ # BinSkim - Binary analyzer for security issues
+ binskim:
+ enabled: ${{ parameters.runSdlTasks }}
+ break: true
+ # Scan all binary types: .pyd (Python), .dll/.exe (Windows), .so (Linux), .dylib (macOS)
+ analyzeTarget: '$(Build.SourcesDirectory)/**/*.{pyd,dll,exe,so,dylib}'
+ analyzeRecurse: true
+ logFile: '$(Build.ArtifactStagingDirectory)/BinSkimResults.sarif'
+
+ # CodeInspector - Source code security analysis
+ codeinspector:
+ enabled: ${{ parameters.runSdlTasks }}
+ logLevel: Error
+
+ # CodeQL - Semantic code analysis
+ codeql:
+ enabled: ${{ parameters.runSdlTasks }}
+ language: 'python,cpp'
+ sourceRoot: '$(REPO_ROOT)'
+ querySuite: security-extended
+
+ # CredScan - Scans for credentials in code
+ # Note: Global baseline/suppression files configured at globalSdl level
+ credscan:
+ enabled: ${{ parameters.runSdlTasks }}
+
+ # ESLint - JavaScript/TypeScript specific, not applicable for Python
+ eslint:
+ enabled: false
+
+ # PoliCheck - Checks for politically incorrect terms
+ policheck:
+ enabled: ${{ parameters.runSdlTasks }}
+ break: true
+ exclusionFile: '$(REPO_ROOT)/.config/PolicheckExclusions.xml'
+
+ # Roslyn Analyzers - .NET-specific, not applicable for Python
+ roslyn:
+ enabled: false
+
+ # Publish SDL logs
+ publishLogs:
+ enabled: ${{ parameters.runSdlTasks }}
+
+ # SBOM - Software Bill of Materials
+ sbom:
+ enabled: ${{ parameters.runSdlTasks }}
+ packageName: 'mssql-python'
+ packageVersion: '${{ variables.packageVersion }}'
+
+ # TSA - Threat and Security Assessment (Official builds only)
+ tsa:
+ enabled: ${{ and(eq(parameters.oneBranchType, 'Official'), parameters.runSdlTasks) }}
+ configFile: '$(REPO_ROOT)/.config/tsaoptions.json'
+
+ # Pipeline stages
+ stages:
+ # Windows stages - one per Python version/architecture combination
+ - ${{ each config in parameters.windowsConfigs }}:
+ - template: /OneBranchPipelines/stages/build-windows-single-stage.yml@self
+ parameters:
+ stageName: Win_py${{ config.pyVer }}_${{ config.arch }}
+ jobName: BuildWheel
+ pythonVersion: ${{ format('{0}.{1}', substring(config.pyVer, 0, 1), substring(config.pyVer, 1, 2)) }}
+ shortPyVer: ${{ config.pyVer }}
+ architecture: ${{ config.arch }}
+ oneBranchType: '${{ parameters.oneBranchType }}'
+ signingEnabled: '${{ parameters.signingEnabled }}'
+ buildConfiguration: '${{ parameters.buildConfiguration }}'
+
+ # macOS stages - one per Python version (universal2 binaries)
+ - ${{ each config in parameters.macosConfigs }}:
+ - template: /OneBranchPipelines/stages/build-macos-single-stage.yml@self
+ parameters:
+ stageName: MacOS_py${{ config.pyVer }}
+ jobName: BuildWheel
+ pythonVersion: ${{ format('{0}.{1}', substring(config.pyVer, 0, 1), substring(config.pyVer, 1, 2)) }}
+ shortPyVer: ${{ config.pyVer }}
+ oneBranchType: '${{ parameters.oneBranchType }}'
+ signingEnabled: '${{ parameters.signingEnabled }}'
+ buildConfiguration: '${{ parameters.buildConfiguration }}'
+
+ # Linux stages - one per distribution/architecture (builds all Python versions inside)
+ - ${{ each config in parameters.linuxConfigs }}:
+ - template: /OneBranchPipelines/stages/build-linux-single-stage.yml@self
+ parameters:
+ stageName: Linux_${{ config.tag }}_${{ config.arch }}
+ jobName: BuildWheels
+ linuxTag: ${{ config.tag }}
+ arch: ${{ config.arch }}
+ dockerPlatform: ${{ config.platform }}
+ oneBranchType: '${{ parameters.oneBranchType }}'
+ signingEnabled: '${{ parameters.signingEnabled }}'
+ buildConfiguration: '${{ parameters.buildConfiguration }}'
+
+ # Consolidate all artifacts into single dist/ folder
+ - stage: Consolidate
+ displayName: 'Consolidate All Artifacts'
+ dependsOn:
+ # Windows dependencies
+ - Win_py310_x64
+ - Win_py311_x64
+ - Win_py312_x64
+ - Win_py313_x64
+ - Win_py311_arm64
+ - Win_py312_arm64
+ - Win_py313_arm64
+ # macOS dependencies
+ - MacOS_py310
+ - MacOS_py311
+ - MacOS_py312
+ - MacOS_py313
+ # Linux dependencies
+ - Linux_manylinux_x86_64
+ - Linux_manylinux_aarch64
+ - Linux_musllinux_x86_64
+ - Linux_musllinux_aarch64
+ jobs:
+ - template: /OneBranchPipelines/jobs/consolidate-artifacts-job.yml@self
+ parameters:
+ oneBranchType: '${{ parameters.oneBranchType }}'
+
+ # Note: Symbol publishing is now handled directly in the Windows build stages
diff --git a/OneBranchPipelines/dummy-release-pipeline.yml b/OneBranchPipelines/dummy-release-pipeline.yml
new file mode 100644
index 00000000..b920abc6
--- /dev/null
+++ b/OneBranchPipelines/dummy-release-pipeline.yml
@@ -0,0 +1,331 @@
+# OneBranch DUMMY/TEST Release Pipeline for mssql-python
+# ⚠️ THIS IS A TEST PIPELINE - NOT FOR PRODUCTION RELEASES ⚠️
+# Downloads wheel and symbol artifacts from build pipeline, publishes symbols, and performs dummy ESRP release for testing
+# Uses Maven ContentType instead of PyPI to avoid accidental production releases
+# This pipeline is ALWAYS Official - no NonOfficial option
+
+name: $(Year:YY)$(DayOfYear)$(Rev:.r)-Dummy-Release
+
+# Manual trigger only - releases should be deliberate
+trigger: none
+pr: none
+
+# Parameters for DUMMY release pipeline
+parameters:
+ - name: packageVersion
+ displayName: '[TEST] Package Version (e.g., 0.13.0)'
+ type: string
+ default: '0.13.0'
+
+ - name: publishSymbols
+ displayName: '[TEST] Publish Symbols to Symbol Servers'
+ type: boolean
+ default: true
+
+ - name: performDummyRelease
+ displayName: '[TEST] Perform Dummy ESRP Release (Maven - NOT PyPI)'
+ type: boolean
+ default: true # Safe to enable - uses Maven ContentType for testing
+
+# Variables
+variables:
+ - name: PACKAGE_VERSION
+ value: '${{ parameters.packageVersion }}'
+ readonly: true
+
+ - name: packageVersion
+ value: '${{ parameters.packageVersion }}'
+ readonly: true
+
+ # Common variables
+ - template: /OneBranchPipelines/variables/common-variables.yml@self
+ - template: /OneBranchPipelines/variables/onebranch-variables.yml@self
+
+ # Variable groups
+ - group: 'ESRP Federated Creds (AME)' # Contains ESRP signing credentials
+ - group: 'Symbols Publishing' # Contains SymbolServer, SymbolTokenUri variables
+
+# OneBranch resources
+resources:
+ repositories:
+ - repository: templates
+ type: git
+ name: 'OneBranch.Pipelines/GovernedTemplates'
+ ref: 'refs/heads/main'
+
+ # Reference to the build pipeline
+ pipelines:
+ - pipeline: buildPipeline
+ source: 'Build-Release-Package-Pipeline' # Name of the build pipeline
+ trigger: none # Manual trigger only
+
+# Extend OneBranch official template
+# Always uses Official template for release pipeline
+extends:
+ template: 'v2/OneBranch.Official.CrossPlat.yml@templates'
+
+ parameters:
+ # Feature flags
+ featureFlags:
+ WindowsHostVersion:
+ Version: '2022'
+
+ # Global SDL Configuration
+ globalSdl:
+ # Global Guardian baseline and suppression files
+ baseline:
+ baselineFile: $(Build.SourcesDirectory)\.gdn\.gdnbaselines
+ suppressionSet: default
+ suppression:
+ suppressionFile: $(Build.SourcesDirectory)\.gdn\.gdnsuppress
+ suppressionSet: default
+
+ # Minimal SDL for release pipeline - artifacts already scanned during build
+ binskim:
+ enabled: true
+ break: true
+
+ credscan:
+ enabled: true
+
+ policheck:
+ enabled: true
+ break: true
+ exclusionFile: '$(REPO_ROOT)/.config/PolicheckExclusions.xml'
+
+ # Publish SDL logs
+ publishLogs:
+ enabled: true
+
+ # TSA - Always enabled for Official release pipeline
+ tsa:
+ enabled: true
+ configFile: '$(REPO_ROOT)/.config/tsaoptions.json'
+
+ # Pipeline stages
+ stages:
+ - stage: TestReleasePackages
+ displayName: '[TEST] Dummy Release - Testing ESRP Workflow'
+
+ jobs:
+ - job: DownloadAndTestRelease
+ displayName: '[TEST] Download Artifacts and Perform Dummy Release'
+
+ pool:
+ type: windows
+ isCustom: true
+ name: Django-1ES-pool
+ vmImage: WIN22-SQL22
+
+ variables:
+ ob_outputDirectory: '$(Build.ArtifactStagingDirectory)'
+
+ steps:
+ # Step 1: Download consolidated artifacts from build pipeline
+ - task: DownloadPipelineArtifact@2
+ displayName: '[TEST] Download Consolidated Artifacts from Build Pipeline'
+ inputs:
+ buildType: 'specific'
+ project: '$(System.TeamProject)'
+ definition: 2199 # Build-Release-Package-Pipeline definition ID
+ buildVersionToDownload: 'specific'
+ buildId: $(resources.pipeline.buildPipeline.runID) # Use the build run selected in UI
+ artifactName: 'drop_Consolidate_ConsolidateArtifacts' # Consolidated artifact with dist/ and symbols/
+ targetPath: '$(Build.SourcesDirectory)/artifacts'
+
+ # Step 3: List downloaded artifacts for verification
+ - task: PowerShell@2
+ displayName: '[TEST] List Downloaded Wheel and Symbol Files'
+ inputs:
+ targetType: 'inline'
+ script: |
+ Write-Host "====================================="
+ Write-Host "[TEST PIPELINE] Downloaded Artifacts:"
+ Write-Host "====================================="
+
+ # List wheel files
+ $wheelsPath = "$(Build.SourcesDirectory)/artifacts/dist"
+ if (Test-Path $wheelsPath) {
+ $wheels = Get-ChildItem -Path $wheelsPath -Filter "*.whl" -Recurse
+
+ Write-Host "`n[WHEELS] Total wheel files found: $($wheels.Count)"
+ foreach ($wheel in $wheels) {
+ $size = [math]::Round($wheel.Length / 1MB, 2)
+ Write-Host " - $($wheel.Name) (${size} MB)"
+ }
+
+ # Copy wheels to dist folder for ESRP
+ Write-Host "`nCopying wheels to $(Build.SourcesDirectory)/dist..."
+ New-Item -ItemType Directory -Force -Path "$(Build.SourcesDirectory)/dist" | Out-Null
+ Copy-Item -Path "$wheelsPath/*.whl" -Destination "$(Build.SourcesDirectory)/dist/" -Force
+
+ } else {
+ Write-Error "Wheel directory not found at: $wheelsPath"
+ exit 1
+ }
+
+ # List symbol files
+ $symbolsPath = "$(Build.SourcesDirectory)/artifacts/symbols"
+ if (Test-Path $symbolsPath) {
+ $symbols = Get-ChildItem -Path $symbolsPath -Filter "*.pdb" -Recurse
+
+ Write-Host "`n[SYMBOLS] Total PDB files found: $($symbols.Count)"
+ foreach ($symbol in $symbols) {
+ $size = [math]::Round($symbol.Length / 1KB, 2)
+ Write-Host " - $($symbol.Name) (${size} KB)"
+ }
+
+ # Copy symbols to symbols folder for publishing
+ Write-Host "`nCopying symbols to $(Build.SourcesDirectory)/symbols..."
+ New-Item -ItemType Directory -Force -Path "$(Build.SourcesDirectory)/symbols" | Out-Null
+ Copy-Item -Path "$symbolsPath/*.pdb" -Destination "$(Build.SourcesDirectory)/symbols/" -Force
+
+ } else {
+ Write-Warning "Symbol directory not found at: $symbolsPath"
+ Write-Warning "Symbol publishing will be skipped if no PDB files found"
+ }
+
+ Write-Host "`n====================================="
+ Write-Host "Summary:"
+ Write-Host "Wheels: $($wheels.Count) files"
+ Write-Host "Symbols: $(if ($symbols) { $symbols.Count } else { 0 }) files"
+ Write-Host "====================================="
+
+ # Step 4: Verify wheel integrity
+ - task: PowerShell@2
+ displayName: '[TEST] Verify Wheel Integrity'
+ inputs:
+ targetType: 'inline'
+ script: |
+ Write-Host "[TEST] Verifying wheel file integrity..."
+
+ $wheels = Get-ChildItem -Path "$(Build.SourcesDirectory)/dist" -Filter "*.whl"
+ $allValid = $true
+
+ foreach ($wheel in $wheels) {
+ # Check if wheel is a valid ZIP file
+ try {
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
+ $zip = [System.IO.Compression.ZipFile]::OpenRead($wheel.FullName)
+ $entryCount = $zip.Entries.Count
+ $zip.Dispose()
+
+ Write-Host "✓ $($wheel.Name) - Valid ($entryCount entries)"
+ }
+ catch {
+ Write-Error "✗ $($wheel.Name) - INVALID: $_"
+ $allValid = $false
+ }
+ }
+
+ if (-not $allValid) {
+ Write-Error "One or more wheel files are corrupted"
+ exit 1
+ }
+
+ Write-Host "`nAll wheels verified successfully!"
+
+ # Step 5: Publish Symbols (if enabled and symbols exist)
+ - ${{ if eq(parameters.publishSymbols, true) }}:
+ - template: /OneBranchPipelines/steps/symbol-publishing-step.yml@self
+ parameters:
+ SymbolsFolder: '$(Build.SourcesDirectory)/symbols'
+
+ # Step 6: Copy wheels to ob_outputDirectory for OneBranch artifact publishing
+ - task: CopyFiles@2
+ displayName: '[TEST] Stage Wheels for Dummy Release'
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/dist'
+ Contents: '*.whl'
+ TargetFolder: '$(ob_outputDirectory)/release'
+ flattenFolders: true
+
+ # Step 7: ESRP Dummy Release Task (only if performDummyRelease is true)
+ # ⚠️ IMPORTANT: Uses Maven ContentType for testing - NOT PyPI!
+ - ${{ if eq(parameters.performDummyRelease, true) }}:
+ - task: EsrpRelease@9
+ displayName: '[TEST] ESRP Dummy Release (Maven - NOT PyPI)'
+ inputs:
+ connectedservicename: '$(ESRPConnectedServiceName)'
+ usemanagedidentity: true
+ keyvaultname: '$(AuthAKVName)'
+ signcertname: '$(AuthSignCertName)'
+ clientid: '$(EsrpClientId)'
+ Intent: 'PackageDistribution'
+ # ⚠️ CRITICAL: ContentType is Maven (NOT PyPI) for safe testing
+ # This ensures no accidental production releases to PyPI
+ ContentType: 'Maven'
+ ContentSource: 'Folder'
+ FolderLocation: '$(Build.SourcesDirectory)/dist'
+ WaitForReleaseCompletion: true
+ Owners: '$(owner)'
+ Approvers: '$(approver)'
+ ServiceEndpointUrl: 'https://api.esrp.microsoft.com'
+ MainPublisher: 'ESRPRELPACMAN'
+ DomainTenantId: '$(DomainTenantId)'
+
+ # Step 8: Show test release status
+ - ${{ if eq(parameters.performDummyRelease, true) }}:
+ - task: PowerShell@2
+ displayName: '[TEST] Dummy Release Summary'
+ inputs:
+ targetType: 'inline'
+ script: |
+ Write-Host "====================================="
+ Write-Host "⚠️ TEST PIPELINE - DUMMY RELEASE COMPLETED ⚠️"
+ Write-Host "====================================="
+ Write-Host "Package: mssql-python (TEST)"
+ Write-Host "Version: ${{ parameters.packageVersion }}"
+ Write-Host "ContentType: Maven (NOT PyPI - Safe for Testing)"
+ Write-Host "Owners: $(owner)"
+ Write-Host "Approvers: $(approver)"
+ Write-Host "Symbols Published: ${{ parameters.publishSymbols }}"
+ Write-Host "====================================="
+ Write-Host ""
+ Write-Host "⚠️ IMPORTANT: This was a DUMMY release using Maven ContentType"
+ Write-Host " NO packages were released to PyPI"
+ Write-Host ""
+ Write-Host "What was tested:"
+ Write-Host "✓ Artifact download from build pipeline"
+ Write-Host "✓ Wheel integrity verification"
+ if ("${{ parameters.publishSymbols }}" -eq "True") {
+ Write-Host "✓ Symbol publishing to SqlClientDrivers org"
+ }
+ Write-Host "✓ ESRP release workflow (Maven ContentType)"
+ Write-Host ""
+ Write-Host "Next steps:"
+ Write-Host "1. Verify dummy release in ESRP portal"
+ Write-Host "2. Check ESRP approval workflow completion"
+ Write-Host "3. Verify symbols in SqlClientDrivers org (if published)"
+ Write-Host "4. For PRODUCTION release, use official-release-pipeline.yml"
+ Write-Host "====================================="
+
+ - ${{ if eq(parameters.performDummyRelease, false) }}:
+ - task: PowerShell@2
+ displayName: '[TEST] Dry Run - Dummy Release Skipped'
+ inputs:
+ targetType: 'inline'
+ script: |
+ Write-Host "====================================="
+ Write-Host "⚠️ TEST PIPELINE - DRY RUN MODE ⚠️"
+ Write-Host "====================================="
+ Write-Host "Package: mssql-python (TEST)"
+ Write-Host "Version: ${{ parameters.packageVersion }}"
+ Write-Host ""
+ Write-Host "Actions performed:"
+ Write-Host "✓ Downloaded wheels from build pipeline"
+ Write-Host "✓ Verified wheel integrity"
+ Write-Host "✓ Downloaded symbols from build pipeline"
+ if ("${{ parameters.publishSymbols }}" -eq "True") {
+ Write-Host "✓ Published symbols to SqlClientDrivers org"
+ }
+ Write-Host "✗ ESRP dummy release NOT performed (parameter disabled)"
+ Write-Host ""
+ Write-Host "To test ESRP workflow:"
+ Write-Host "1. Set 'performDummyRelease' parameter to true"
+ Write-Host "2. Re-run this TEST pipeline"
+ Write-Host ""
+ Write-Host "For PRODUCTION release:"
+ Write-Host "1. Use official-release-pipeline.yml instead"
+ Write-Host "2. Official pipeline uses PyPI ContentType"
+ Write-Host "====================================="
diff --git a/OneBranchPipelines/jobs/consolidate-artifacts-job.yml b/OneBranchPipelines/jobs/consolidate-artifacts-job.yml
new file mode 100644
index 00000000..0ef960fc
--- /dev/null
+++ b/OneBranchPipelines/jobs/consolidate-artifacts-job.yml
@@ -0,0 +1,140 @@
+# Consolidate Artifacts Job Template
+# Downloads artifacts from all platform build stages and consolidates into single dist/ folder
+# Works with individual stage artifacts (15 stages total: 7 Windows + 4 macOS + 4 Linux)
+# Each Linux stage builds 4 Python versions, resulting in 27 total wheels
+parameters:
+ - name: oneBranchType
+ type: string
+ default: 'Official'
+
+jobs:
+ - job: ConsolidateArtifacts
+ displayName: 'Consolidate All Platform Artifacts'
+ condition: succeeded()
+
+ pool:
+ type: linux
+ isCustom: true
+ name: Azure Pipelines
+ vmImage: 'ubuntu-latest'
+
+ variables:
+ # Disable BinSkim - consolidation job only downloads artifacts, no binary builds
+ - name: ob_sdl_binskim_enabled
+ value: false
+ - name: ob_outputDirectory
+ value: '$(Build.ArtifactStagingDirectory)'
+
+ steps:
+ - checkout: none # No source code needed for consolidation
+
+ # Download ALL artifacts from current build
+ # Matrix jobs publish as: Windows_, macOS_, Linux_
+ # This downloads all of them automatically (27 total artifacts)
+ - task: DownloadPipelineArtifact@2
+ displayName: 'Download All Platform Artifacts'
+ inputs:
+ buildType: 'current'
+ targetPath: '$(Pipeline.Workspace)/all-artifacts'
+
+ # Consolidate all wheels into single dist/ directory
+ - bash: |
+ set -e
+ echo "Creating consolidated dist directory..."
+ mkdir -p $(ob_outputDirectory)/dist
+
+ echo "=========================================="
+ echo "Searching for all wheel files across all artifacts..."
+ echo "=========================================="
+
+ # List all downloaded artifacts
+ echo "Downloaded artifacts:"
+ ls -la $(Pipeline.Workspace)/all-artifacts/
+
+ echo ""
+ echo "Finding all .whl files..."
+ find $(Pipeline.Workspace)/all-artifacts -name "*.whl" -exec ls -lh {} \;
+
+ echo ""
+ echo "Copying all wheels to consolidated dist/..."
+ find $(Pipeline.Workspace)/all-artifacts -name "*.whl" -exec cp -v {} $(ob_outputDirectory)/dist/ \;
+
+ echo ""
+ echo "=========================================="
+ echo "Consolidation complete! Total wheels:"
+ echo "=========================================="
+ ls -lh $(ob_outputDirectory)/dist/
+ echo ""
+ WHEEL_COUNT=$(ls -1 $(ob_outputDirectory)/dist/*.whl 2>/dev/null | wc -l)
+ echo "Total wheel count: $WHEEL_COUNT"
+ echo "Expected: 27 wheels (7 Windows + 4 macOS + 16 Linux)"
+
+ if [ "$WHEEL_COUNT" -ne 27 ]; then
+ echo "WARNING: Expected 27 wheels but found $WHEEL_COUNT"
+ else
+ echo "SUCCESS: All 27 wheels consolidated!"
+ fi
+ displayName: 'Consolidate wheels from all platforms'
+
+ # Optional: Consolidate native bindings for reference
+ - bash: |
+ set -e
+ echo "Creating bindings directory structure..."
+ mkdir -p $(ob_outputDirectory)/bindings
+
+ echo "Searching for bindings directories..."
+ find $(Pipeline.Workspace)/all-artifacts -type d -name "bindings" | while read dir; do
+ echo "Found bindings in: $dir"
+ cp -rv "$dir"/* $(ob_outputDirectory)/bindings/ 2>/dev/null || true
+ done
+
+ echo "Bindings consolidation complete!"
+ echo "Bindings structure:"
+ find $(ob_outputDirectory)/bindings -type f | head -20
+ displayName: 'Consolidate native bindings (optional)'
+ continueOnError: true
+
+ # Optional: Consolidate Windows symbols
+ - bash: |
+ set -e
+ echo "Searching for symbols directories..."
+ if find $(Pipeline.Workspace)/all-artifacts -type d -name "symbols" | grep -q .; then
+ echo "Copying Windows symbols..."
+ mkdir -p $(ob_outputDirectory)/symbols
+ find $(Pipeline.Workspace)/all-artifacts -type d -name "symbols" | while read dir; do
+ echo "Found symbols in: $dir"
+ cp -rv "$dir"/* $(ob_outputDirectory)/symbols/ 2>/dev/null || true
+ done
+ echo "Symbols consolidation complete!"
+ else
+ echo "No Windows symbols found (expected for NonOfficial builds)"
+ fi
+ displayName: 'Consolidate Windows symbols (optional)'
+ continueOnError: true
+
+ # Verify consolidation
+ - bash: |
+ echo "=========================================="
+ echo "Consolidation Summary"
+ echo "=========================================="
+ echo ""
+ echo "Wheels in dist/:"
+ ls -lh $(ob_outputDirectory)/dist/*.whl || echo "No wheels found!"
+ echo ""
+ echo "Total wheels: $(ls -1 $(ob_outputDirectory)/dist/*.whl 2>/dev/null | wc -l)"
+ echo ""
+ if [ -d "$(ob_outputDirectory)/bindings" ]; then
+ echo "Bindings directory:"
+ find $(ob_outputDirectory)/bindings -type f | head -20
+ fi
+ echo ""
+ echo "=========================================="
+ displayName: 'Verify consolidation'
+
+ # Publish consolidated artifacts
+ - task: PublishPipelineArtifact@1
+ displayName: 'Publish Consolidated Artifacts'
+ inputs:
+ targetPath: '$(ob_outputDirectory)'
+ artifact: 'drop_Consolidate_ConsolidateArtifacts'
+ publishLocation: 'pipeline'
diff --git a/OneBranchPipelines/official-release-pipeline.yml b/OneBranchPipelines/official-release-pipeline.yml
new file mode 100644
index 00000000..d8bca682
--- /dev/null
+++ b/OneBranchPipelines/official-release-pipeline.yml
@@ -0,0 +1,309 @@
+# OneBranch Official Release Pipeline for mssql-python
+# Downloads wheel and symbol artifacts from build pipeline, publishes symbols, and releases wheels to PyPI via ESRP
+# This pipeline is ALWAYS Official - no NonOfficial option
+
+name: $(Year:YY)$(DayOfYear)$(Rev:.r)-Release
+
+# Manual trigger only - releases should be deliberate
+trigger: none
+pr: none
+
+# Parameters for release pipeline
+parameters:
+ - name: packageVersion
+ displayName: 'Package Version (e.g., 0.13.0)'
+ type: string
+ default: '0.13.0'
+
+ - name: publishSymbols
+ displayName: 'Publish Symbols to Symbol Servers'
+ type: boolean
+ default: true
+
+ - name: releaseToPyPI
+ displayName: 'Release to PyPI (Production)'
+ type: boolean
+ default: false # Safety: Default to false to prevent accidental releases
+
+# Variables
+variables:
+ - name: PACKAGE_VERSION
+ value: '${{ parameters.packageVersion }}'
+ readonly: true
+
+ - name: packageVersion
+ value: '${{ parameters.packageVersion }}'
+ readonly: true
+
+ # Common variables
+ - template: /OneBranchPipelines/variables/common-variables.yml@self
+ - template: /OneBranchPipelines/variables/onebranch-variables.yml@self
+
+ # Variable groups
+ - group: 'ESRP Federated Creds (AME)' # Contains ESRP signing credentials
+ - group: 'Symbols Publishing' # Contains SymbolServer, SymbolTokenUri variables
+
+# OneBranch resources
+resources:
+ repositories:
+ - repository: templates
+ type: git
+ name: 'OneBranch.Pipelines/GovernedTemplates'
+ ref: 'refs/heads/main'
+
+ # Reference to the build pipeline
+ pipelines:
+ - pipeline: buildPipeline
+ source: 'Build-Release-Package-Pipeline' # Name of the build pipeline
+ trigger: none # Manual trigger only
+
+# Extend OneBranch official template
+# Always uses Official template for release pipeline
+extends:
+ template: 'v2/OneBranch.Official.CrossPlat.yml@templates'
+
+ parameters:
+ # Feature flags
+ featureFlags:
+ WindowsHostVersion:
+ Version: '2022'
+
+ # Global SDL Configuration
+ globalSdl:
+ # Global Guardian baseline and suppression files
+ baseline:
+ baselineFile: $(Build.SourcesDirectory)\.gdn\.gdnbaselines
+ suppressionSet: default
+ suppression:
+ suppressionFile: $(Build.SourcesDirectory)\.gdn\.gdnsuppress
+ suppressionSet: default
+
+ # Minimal SDL for release pipeline - artifacts already scanned during build
+ binskim:
+ enabled: true
+ break: true
+
+ credscan:
+ enabled: true
+
+ policheck:
+ enabled: true
+ break: true
+ exclusionFile: '$(REPO_ROOT)/.config/PolicheckExclusions.xml'
+
+ # Publish SDL logs
+ publishLogs:
+ enabled: true
+
+ # TSA - Always enabled for Official release pipeline
+ tsa:
+ enabled: true
+ configFile: '$(REPO_ROOT)/.config/tsaoptions.json'
+
+ # Pipeline stages
+ stages:
+ - stage: ReleasePackages
+ displayName: 'Release Python Packages to PyPI'
+
+ jobs:
+ - job: DownloadAndRelease
+ displayName: 'Download Artifacts and Release via ESRP'
+
+ pool:
+ type: windows
+ isCustom: true
+ name: Django-1ES-pool
+ vmImage: WIN22-SQL22
+
+ variables:
+ ob_outputDirectory: '$(Build.ArtifactStagingDirectory)'
+
+ steps:
+ # Step 1: Download consolidated artifacts from build pipeline
+ - task: DownloadPipelineArtifact@2
+ displayName: 'Download Consolidated Artifacts from Build Pipeline'
+ inputs:
+ buildType: 'specific'
+ project: '$(System.TeamProject)'
+ definition: 2199 # Build-Release-Package-Pipeline definition ID
+ buildVersionToDownload: 'specific'
+ buildId: $(resources.pipeline.buildPipeline.runID) # Use the build run selected in UI
+ artifactName: 'drop_Consolidate_ConsolidateArtifacts' # Consolidated artifact with dist/ and symbols/
+ targetPath: '$(Build.SourcesDirectory)/artifacts'
+
+ # Step 3: List downloaded artifacts for verification
+ - task: PowerShell@2
+ displayName: 'List Downloaded Wheel and Symbol Files'
+ inputs:
+ targetType: 'inline'
+ script: |
+ Write-Host "====================================="
+ Write-Host "Downloaded Artifacts:"
+ Write-Host "====================================="
+
+ # List wheel files
+ $wheelsPath = "$(Build.SourcesDirectory)/artifacts/dist"
+ if (Test-Path $wheelsPath) {
+ $wheels = Get-ChildItem -Path $wheelsPath -Filter "*.whl" -Recurse
+
+ Write-Host "`n[WHEELS] Total wheel files found: $($wheels.Count)"
+ foreach ($wheel in $wheels) {
+ $size = [math]::Round($wheel.Length / 1MB, 2)
+ Write-Host " - $($wheel.Name) (${size} MB)"
+ }
+
+ # Copy wheels to dist folder for ESRP
+ Write-Host "`nCopying wheels to $(Build.SourcesDirectory)/dist..."
+ New-Item -ItemType Directory -Force -Path "$(Build.SourcesDirectory)/dist" | Out-Null
+ Copy-Item -Path "$wheelsPath/*.whl" -Destination "$(Build.SourcesDirectory)/dist/" -Force
+
+ } else {
+ Write-Error "Wheel directory not found at: $wheelsPath"
+ exit 1
+ }
+
+ # List symbol files
+ $symbolsPath = "$(Build.SourcesDirectory)/artifacts/symbols"
+ if (Test-Path $symbolsPath) {
+ $symbols = Get-ChildItem -Path $symbolsPath -Filter "*.pdb" -Recurse
+
+ Write-Host "`n[SYMBOLS] Total PDB files found: $($symbols.Count)"
+ foreach ($symbol in $symbols) {
+ $size = [math]::Round($symbol.Length / 1KB, 2)
+ Write-Host " - $($symbol.Name) (${size} KB)"
+ }
+
+ # Copy symbols to symbols folder for publishing
+ Write-Host "`nCopying symbols to $(Build.SourcesDirectory)/symbols..."
+ New-Item -ItemType Directory -Force -Path "$(Build.SourcesDirectory)/symbols" | Out-Null
+ Copy-Item -Path "$symbolsPath/*.pdb" -Destination "$(Build.SourcesDirectory)/symbols/" -Force
+
+ } else {
+ Write-Warning "Symbol directory not found at: $symbolsPath"
+ Write-Warning "Symbol publishing will be skipped if no PDB files found"
+ }
+
+ Write-Host "`n====================================="
+ Write-Host "Summary:"
+ Write-Host "Wheels: $($wheels.Count) files"
+ Write-Host "Symbols: $(if ($symbols) { $symbols.Count } else { 0 }) files"
+ Write-Host "====================================="
+
+ # Step 4: Verify wheel integrity
+ - task: PowerShell@2
+ displayName: 'Verify Wheel Integrity'
+ inputs:
+ targetType: 'inline'
+ script: |
+ Write-Host "Verifying wheel file integrity..."
+
+ $wheels = Get-ChildItem -Path "$(Build.SourcesDirectory)/dist" -Filter "*.whl"
+ $allValid = $true
+
+ foreach ($wheel in $wheels) {
+ # Check if wheel is a valid ZIP file
+ try {
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
+ $zip = [System.IO.Compression.ZipFile]::OpenRead($wheel.FullName)
+ $entryCount = $zip.Entries.Count
+ $zip.Dispose()
+
+ Write-Host "✓ $($wheel.Name) - Valid ($entryCount entries)"
+ }
+ catch {
+ Write-Error "✗ $($wheel.Name) - INVALID: $_"
+ $allValid = $false
+ }
+ }
+
+ if (-not $allValid) {
+ Write-Error "One or more wheel files are corrupted"
+ exit 1
+ }
+
+ Write-Host "`nAll wheels verified successfully!"
+
+ # Step 5: Publish Symbols (if enabled and symbols exist)
+ - ${{ if eq(parameters.publishSymbols, true) }}:
+ - template: /OneBranchPipelines/steps/symbol-publishing-step.yml@self
+ parameters:
+ SymbolsFolder: '$(Build.SourcesDirectory)/symbols'
+
+ # Step 6: Copy wheels to ob_outputDirectory for OneBranch artifact publishing
+ - task: CopyFiles@2
+ displayName: 'Stage Wheels for Release'
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/dist'
+ Contents: '*.whl'
+ TargetFolder: '$(ob_outputDirectory)/release'
+ flattenFolders: true
+
+ # Step 7: ESRP Release Task (only if releaseToPyPI is true)
+ - ${{ if eq(parameters.releaseToPyPI, true) }}:
+ - task: EsrpRelease@9
+ displayName: 'ESRP Release to PyPI'
+ inputs:
+ connectedservicename: '$(ESRPConnectedServiceName)'
+ usemanagedidentity: true
+ keyvaultname: '$(AuthAKVName)'
+ signcertname: '$(AuthSignCertName)'
+ clientid: '$(EsrpClientId)'
+ Intent: 'PackageDistribution'
+ ContentType: 'PyPI'
+ ContentSource: 'Folder'
+ FolderLocation: '$(Build.SourcesDirectory)/dist'
+ WaitForReleaseCompletion: true
+ Owners: '$(owner)'
+ Approvers: '$(approver)'
+ ServiceEndpointUrl: 'https://api.esrp.microsoft.com'
+ MainPublisher: 'ESRPRELPACMAN'
+ DomainTenantId: '$(DomainTenantId)'
+
+ # Step 8: Show release status
+ - ${{ if eq(parameters.releaseToPyPI, true) }}:
+ - task: PowerShell@2
+ displayName: 'Release Summary'
+ inputs:
+ targetType: 'inline'
+ script: |
+ Write-Host "====================================="
+ Write-Host "ESRP Release Completed"
+ Write-Host "====================================="
+ Write-Host "Package: mssql-python"
+ Write-Host "Version: ${{ parameters.packageVersion }}"
+ Write-Host "Target: PyPI"
+ Write-Host "Owners: $(owner)"
+ Write-Host "Approvers: $(approver)"
+ Write-Host "Symbols Published: ${{ parameters.publishSymbols }}"
+ Write-Host "====================================="
+ Write-Host ""
+ Write-Host "Next steps:"
+ Write-Host "1. Verify release in ESRP portal"
+ Write-Host "2. Wait for approval workflow completion"
+ Write-Host "3. Verify package on PyPI: https://pypi.org/project/mssql-python/"
+ Write-Host "4. Verify symbols in SqlClientDrivers org (if published)"
+ Write-Host "====================================="
+
+ - ${{ if eq(parameters.releaseToPyPI, false) }}:
+ - task: PowerShell@2
+ displayName: 'Dry Run - Release Skipped'
+ inputs:
+ targetType: 'inline'
+ script: |
+ Write-Host "====================================="
+ Write-Host "DRY RUN MODE - No Release Performed"
+ Write-Host "====================================="
+ Write-Host "Package: mssql-python"
+ Write-Host "Version: ${{ parameters.packageVersion }}"
+ Write-Host ""
+ Write-Host "Actions performed:"
+ Write-Host "- Downloaded wheels from build pipeline"
+ Write-Host "- Downloaded symbols from build pipeline"
+ if ("${{ parameters.publishSymbols }}" -eq "True") {
+ Write-Host "- Published symbols to SqlClientDrivers org"
+ }
+ Write-Host ""
+ Write-Host "To perform actual release:"
+ Write-Host "1. Set 'releaseToPyPI' parameter to true"
+ Write-Host "2. Re-run pipeline"
+ Write-Host "====================================="
diff --git a/OneBranchPipelines/stages/build-linux-single-stage.yml b/OneBranchPipelines/stages/build-linux-single-stage.yml
new file mode 100644
index 00000000..58003c5d
--- /dev/null
+++ b/OneBranchPipelines/stages/build-linux-single-stage.yml
@@ -0,0 +1,253 @@
+# Linux Single Configuration Stage Template
+# Builds Python wheels for a specific Linux distribution and architecture
+# Builds for Python 3.10, 3.11, 3.12, 3.13 within single job
+parameters:
+ - name: stageName
+ type: string
+ - name: jobName
+ type: string
+ default: 'BuildWheels'
+ - name: linuxTag
+ type: string # 'manylinux' or 'musllinux'
+ - name: arch
+ type: string # 'x86_64' or 'aarch64'
+ - name: dockerPlatform
+ type: string # 'linux/amd64' or 'linux/arm64'
+ - name: oneBranchType
+ type: string
+ default: 'Official'
+ - name: signingEnabled
+ type: boolean
+ default: true
+ - name: buildConfiguration
+ type: string
+ default: 'Release'
+
+stages:
+ - stage: ${{ parameters.stageName }}
+ displayName: 'Build Linux ${{ parameters.linuxTag }} ${{ parameters.arch }}'
+ jobs:
+ - job: ${{ parameters.jobName }}
+ displayName: 'Build Wheels - ${{ parameters.linuxTag }} ${{ parameters.arch }}'
+
+ pool:
+ type: linux
+ isCustom: true
+ name: Django-1ES-pool
+ demands:
+ - imageOverride -equals ADO-UB22-SQL22
+ timeoutInMinutes: 120
+
+ variables:
+ # Disable BinSkim for Linux - requires ICU libraries not available in containers
+ - name: ob_sdl_binskim_enabled
+ value: false
+ - name: ob_outputDirectory
+ value: '$(Build.ArtifactStagingDirectory)'
+ - name: LinuxContainerImage
+ value: 'onebranch.azurecr.io/linux/ubuntu-2204:latest'
+ - name: LINUX_TAG
+ value: ${{ parameters.linuxTag }}
+ - name: ARCH
+ value: ${{ parameters.arch }}
+ - name: DOCKER_PLATFORM
+ value: ${{ parameters.dockerPlatform }}
+
+ steps:
+ - checkout: self
+ fetchDepth: 0
+
+ # Install Docker
+ - task: DockerInstaller@0
+ inputs:
+ dockerVersion: '20.10.21'
+ displayName: 'Install Docker'
+
+ - bash: |
+ set -e
+ echo "Verifying we're on Linux..."
+ if [[ "$(uname -s)" != "Linux" ]]; then
+ echo "ERROR: This job requires a Linux agent but got: $(uname -s)"
+ echo "Agent info: $(uname -a)"
+ exit 1
+ fi
+
+ uname -a
+
+ # Start dockerd
+ sudo dockerd > docker.log 2>&1 &
+ sleep 10
+
+ # Verify Docker works
+ docker --version
+ docker info
+ displayName: 'Setup and start Docker daemon'
+
+ - script: |
+ docker run --rm --privileged tonistiigi/binfmt --install all
+ displayName: 'Enable QEMU (for aarch64)'
+
+ - script: |
+ rm -rf $(ob_outputDirectory)/dist $(ob_outputDirectory)/bindings
+ mkdir -p $(ob_outputDirectory)/dist $(ob_outputDirectory)/wheels
+ mkdir -p $(ob_outputDirectory)/bindings/$(LINUX_TAG)-$(ARCH)
+ displayName: 'Prepare artifact directories'
+
+ - script: |
+ # Determine image based on LINUX_TAG and ARCH
+ if [[ "$(LINUX_TAG)" == "musllinux" ]]; then
+ IMAGE="quay.io/pypa/musllinux_1_2_$(ARCH)"
+ else
+ IMAGE="quay.io/pypa/manylinux_2_28_$(ARCH)"
+ fi
+
+ docker run -d --name build-$(LINUX_TAG)-$(ARCH) \
+ --platform $(DOCKER_PLATFORM) \
+ -v $(Build.SourcesDirectory):/workspace \
+ -w /workspace \
+ $IMAGE \
+ tail -f /dev/null
+ displayName: 'Start $(LINUX_TAG) $(ARCH) container'
+
+ - script: |
+ set -euxo pipefail
+ export PATH=$PATH:`pwd`/docker
+ if [[ "$(LINUX_TAG)" == "manylinux" ]]; then
+ docker exec build-$(LINUX_TAG)-$(ARCH) bash -lc '
+ set -euxo pipefail
+ if command -v dnf >/dev/null 2>&1; then
+ dnf -y update || true
+ dnf -y install gcc gcc-c++ make cmake unixODBC-devel krb5-libs keyutils-libs ccache || true
+ elif command -v yum >/dev/null 2>&1; then
+ yum -y update || true
+ yum -y install gcc gcc-c++ make cmake unixODBC-devel krb5-libs keyutils-libs ccache || true
+ fi
+ gcc --version || true
+ cmake --version || true
+ '
+ else
+ docker exec build-$(LINUX_TAG)-$(ARCH) sh -lc '
+ set -euxo pipefail
+ apk update || true
+ apk add --no-cache bash build-base cmake unixodbc-dev krb5-libs keyutils-libs ccache || true
+ gcc --version || true
+ cmake --version || true
+ '
+ fi
+ displayName: 'Install system build dependencies'
+
+ - script: |
+ set -euxo pipefail
+ if [[ "$(LINUX_TAG)" == "manylinux" ]]; then SHELL_EXE=bash; else SHELL_EXE=sh; fi
+ docker exec build-$(LINUX_TAG)-$(ARCH) $SHELL_EXE -lc 'mkdir -p /workspace/dist'
+
+ for PYBIN in cp310 cp311 cp312 cp313; do
+ echo "=== Building for $PYBIN on $(LINUX_TAG)/$(ARCH) ==="
+ if [[ "$(LINUX_TAG)" == "manylinux" ]]; then
+ docker exec -e PYBIN=$PYBIN build-$(LINUX_TAG)-$(ARCH) bash -lc '
+ set -euxo pipefail;
+ PY=/opt/python/${PYBIN}-${PYBIN}/bin/python;
+ test -x $PY || { echo "Python $PY missing"; exit 0; };
+ ln -sf $PY /usr/local/bin/python;
+ python -m pip install -U pip setuptools wheel pybind11;
+ cd /workspace/mssql_python/pybind;
+ bash build.sh;
+ cd /workspace;
+ python setup.py bdist_wheel;
+ '
+ else
+ docker exec -e PYBIN=$PYBIN build-$(LINUX_TAG)-$(ARCH) sh -lc '
+ set -euxo pipefail;
+ PY=/opt/python/${PYBIN}-${PYBIN}/bin/python;
+ test -x $PY || { echo "Python $PY missing"; exit 0; };
+ ln -sf $PY /usr/local/bin/python;
+ python -m pip install -U pip setuptools wheel pybind11;
+ cd /workspace/mssql_python/pybind;
+ bash build.sh;
+ cd /workspace;
+ python setup.py bdist_wheel;
+ '
+ fi
+ done
+ displayName: 'Build wheels for Python 3.10-3.13'
+
+ - script: |
+ set -euxo pipefail
+ docker cp build-$(LINUX_TAG)-$(ARCH):/workspace/dist/. "$(ob_outputDirectory)/wheels/" || echo "No wheels"
+
+ mkdir -p "$(ob_outputDirectory)/bindings/$(LINUX_TAG)-$(ARCH)"
+ docker exec build-$(LINUX_TAG)-$(ARCH) $([[ "$(LINUX_TAG)" == "manylinux" ]] && echo bash -lc || echo sh -lc) '
+ OUT="/tmp/ddbc-out";
+ rm -rf "$OUT"; mkdir -p "$OUT";
+ find /workspace/mssql_python -maxdepth 1 -type f -name "*.so" -exec cp -v {} "$OUT"/ \; || true
+ '
+
+ docker cp "build-$(LINUX_TAG)-$(ARCH):/tmp/ddbc-out/." \
+ "$(ob_outputDirectory)/bindings/$(LINUX_TAG)-$(ARCH)/" || echo "No .so files"
+ displayName: 'Copy artifacts to host'
+
+ - script: |
+ docker stop build-$(LINUX_TAG)-$(ARCH) || true
+ docker rm build-$(LINUX_TAG)-$(ARCH) || true
+ displayName: 'Cleanup container'
+ condition: always()
+
+ # Explicit publish with OneBranch-compliant artifact name
+ - task: PublishPipelineArtifact@1
+ displayName: 'Publish Linux Artifacts'
+ inputs:
+ targetPath: '$(ob_outputDirectory)'
+ artifact: 'drop_${{ parameters.stageName }}_${{ parameters.jobName }}'
+ publishLocation: 'pipeline'
+
+ # General malware scanning (Component Governance + OneBranch AntiMalware)
+ - template: ../steps/malware-scanning-step.yml@self
+ parameters:
+ scanPath: '$(ob_outputDirectory)'
+ artifactType: 'dll'
+
+ # ESRP Malware scanning (Official builds only)
+ - ${{ if and(eq(parameters.signingEnabled, true), eq(parameters.oneBranchType, 'Official')) }}:
+ - task: EsrpMalwareScanning@5
+ displayName: 'ESRP MalwareScanning - Python Wheels (Official)'
+ inputs:
+ ConnectedServiceName: '$(SigningEsrpConnectedServiceName)'
+ AppRegistrationClientId: '$(SigningAppRegistrationClientId)'
+ AppRegistrationTenantId: '$(SigningAppRegistrationTenantId)'
+ EsrpClientId: '$(SigningEsrpClientId)'
+ UseMSIAuthentication: true
+ FolderPath: '$(ob_outputDirectory)/wheels'
+ Pattern: '*.whl'
+ SessionTimeout: 60
+ CleanupTempStorage: 1
+ VerboseLogin: 1
+
+ # ESRP Malware scanning (NonOfficial builds with signing enabled; Official builds are scanned above)
+ - ${{ if and(eq(parameters.signingEnabled, true), ne(parameters.oneBranchType, 'Official')) }}:
+ - task: EsrpMalwareScanning@5
+ displayName: 'ESRP MalwareScanning - Python Wheels'
+ inputs:
+ ConnectedServiceName: '$(SigningEsrpConnectedServiceName)'
+ AppRegistrationClientId: '$(SigningAppRegistrationClientId)'
+ AppRegistrationTenantId: '$(SigningAppRegistrationTenantId)'
+ EsrpClientId: '$(SigningEsrpClientId)'
+ UseMSIAuthentication: true
+ FolderPath: '$(ob_outputDirectory)/wheels'
+ Pattern: '*.whl'
+ SessionTimeout: 60
+ CleanupTempStorage: 1
+ VerboseLogin: 1
+
+ # ESRP Code Signing (DISABLED - wheel files cannot be signed with SignTool)
+ # See compound-esrp-code-signing-step.yml for detailed explanation of why this doesn't work
+ # - ${{ if eq(parameters.signingEnabled, true) }}:
+ # - template: /OneBranchPipelines/steps/compound-esrp-code-signing-step.yml@self
+ # parameters:
+ # appRegistrationClientId: '$(SigningAppRegistrationClientId)'
+ # appRegistrationTenantId: '$(SigningAppRegistrationTenantId)'
+ # artifactType: 'whl'
+ # authAkvName: '$(SigningAuthAkvName)'
+ # authSignCertName: '$(SigningAuthSignCertName)'
+ # esrpClientId: '$(SigningEsrpClientId)'
+ # esrpConnectedServiceName: '$(SigningEsrpConnectedServiceName)'
+ # signPath: '$(ob_outputDirectory)/wheels'
diff --git a/OneBranchPipelines/stages/build-macos-single-stage.yml b/OneBranchPipelines/stages/build-macos-single-stage.yml
new file mode 100644
index 00000000..ee538098
--- /dev/null
+++ b/OneBranchPipelines/stages/build-macos-single-stage.yml
@@ -0,0 +1,194 @@
+# macOS Single Configuration Stage Template
+# Builds Python wheel for a specific Python version (universal2 binary)
+parameters:
+ - name: stageName
+ type: string
+ - name: jobName
+ type: string
+ default: 'BuildWheel'
+ - name: pythonVersion
+ type: string
+ - name: shortPyVer
+ type: string
+ - name: oneBranchType
+ type: string
+ default: 'Official'
+ - name: signingEnabled
+ type: boolean
+ default: true
+ - name: buildConfiguration
+ type: string
+ default: 'Release'
+
+stages:
+ - stage: ${{ parameters.stageName }}
+ displayName: 'Build macOS Python ${{ parameters.pythonVersion }}'
+ jobs:
+ - job: ${{ parameters.jobName }}
+ displayName: 'Build Wheel - Python ${{ parameters.pythonVersion }} universal2'
+
+ pool:
+ type: linux
+ isCustom: true
+ name: Azure Pipelines
+ vmImage: 'macOS-14'
+ timeoutInMinutes: 120
+
+ variables:
+ # Disable BinSkim for macOS - primarily designed for Windows binaries
+ - name: ob_sdl_binskim_enabled
+ value: false
+ - name: ob_outputDirectory
+ value: '$(Build.ArtifactStagingDirectory)'
+ - name: LinuxContainerImage
+ value: 'onebranch.azurecr.io/linux/ubuntu-2204:latest'
+ - name: pythonVersion
+ value: ${{ parameters.pythonVersion }}
+ - name: shortPyVer
+ value: ${{ parameters.shortPyVer }}
+
+ steps:
+ - checkout: self
+ fetchDepth: 0
+
+ - task: UsePythonVersion@0
+ inputs:
+ versionSpec: '${{ parameters.pythonVersion }}'
+ addToPath: true
+ displayName: 'Use Python ${{ parameters.pythonVersion }} (Universal2)'
+ continueOnError: false
+
+ - script: |
+ brew update
+ brew uninstall cmake --ignore-dependencies || echo "CMake not installed"
+ brew install cmake
+ displayName: 'Install CMake'
+
+ - script: |
+ python --version
+ python -m pip --version
+ python -m pip install --upgrade pip
+ python -m pip install -r requirements.txt
+ python -m pip install cmake pybind11
+ displayName: 'Install dependencies'
+
+ - script: |
+ echo "Python Version: ${{ parameters.pythonVersion }}"
+ echo "Building Universal2 Binary"
+ cd "$(Build.SourcesDirectory)/mssql_python/pybind"
+ ./build.sh
+ displayName: 'Build .so file'
+ continueOnError: false
+
+ - task: CopyFiles@2
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/mssql_python'
+ Contents: '*.so'
+ TargetFolder: '$(ob_outputDirectory)/bindings/macOS'
+ displayName: 'Copy .so files'
+
+ - script: |
+ brew update
+ brew install docker colima
+ colima start --cpu 3 --memory 10 --disk 30 --vm-type=vz || \
+ colima start --cpu 3 --memory 10 --disk 30 --vm-type=qemu
+ sleep 30
+ docker context use colima >/dev/null || true
+ docker version
+ displayName: 'Install and start Docker (Colima)'
+ timeoutInMinutes: 15
+
+ - script: |
+ docker pull mcr.microsoft.com/mssql/server:2022-latest
+ docker run --name sqlserver \
+ -e ACCEPT_EULA=Y \
+ -e MSSQL_SA_PASSWORD="${DB_PASSWORD}" \
+ -p 1433:1433 -d \
+ mcr.microsoft.com/mssql/server:2022-latest
+
+ for i in {1..30}; do
+ docker exec sqlserver /opt/mssql-tools18/bin/sqlcmd \
+ -S localhost -U SA -P "$DB_PASSWORD" -C -Q "SELECT 1" && break
+ sleep 2
+ done
+ displayName: 'Start SQL Server (Docker)'
+ env:
+ DB_PASSWORD: $(DB_PASSWORD)
+
+ - script: |
+ python -m pytest -v
+ displayName: 'Run pytests'
+ env:
+ DB_CONNECTION_STRING: 'Server=tcp:127.0.0.1,1433;Database=master;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes'
+
+ - script: |
+ python -m pip install --upgrade pip wheel setuptools
+ python setup.py bdist_wheel
+ displayName: 'Build wheel package'
+
+ - task: CopyFiles@2
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/dist'
+ Contents: '*.whl'
+ TargetFolder: '$(ob_outputDirectory)/wheels'
+ displayName: 'Copy wheel files'
+
+ # Explicit publish with OneBranch-compliant artifact name
+ - task: PublishPipelineArtifact@1
+ displayName: 'Publish macOS Artifacts'
+ inputs:
+ targetPath: '$(ob_outputDirectory)'
+ artifact: 'drop_${{ parameters.stageName }}_${{ parameters.jobName }}'
+ publishLocation: 'pipeline'
+
+ # General malware scanning (Component Governance + OneBranch AntiMalware)
+ - template: /OneBranchPipelines/steps/malware-scanning-step.yml@self
+ parameters:
+ scanPath: '$(ob_outputDirectory)'
+ artifactType: 'dll'
+
+ # ESRP Malware scanning (Official builds only)
+ - ${{ if and(eq(parameters.signingEnabled, true), eq(parameters.oneBranchType, 'Official')) }}:
+ - task: EsrpMalwareScanning@5
+ displayName: 'ESRP MalwareScanning - Python Wheels (Official)'
+ inputs:
+ ConnectedServiceName: '$(SigningEsrpConnectedServiceName)'
+ AppRegistrationClientId: '$(SigningAppRegistrationClientId)'
+ AppRegistrationTenantId: '$(SigningAppRegistrationTenantId)'
+ EsrpClientId: '$(SigningEsrpClientId)'
+ UseMSIAuthentication: true
+ FolderPath: '$(ob_outputDirectory)/wheels'
+ Pattern: '*.whl'
+ SessionTimeout: 60
+ CleanupTempStorage: 1
+ VerboseLogin: 1
+
+ # ESRP Malware scanning (non-Official builds; Official builds are scanned by the step above)
+ - ${{ if and(eq(parameters.signingEnabled, true), ne(parameters.oneBranchType, 'Official')) }}:
+ - task: EsrpMalwareScanning@5
+ displayName: 'ESRP MalwareScanning - Python Wheels'
+ inputs:
+ ConnectedServiceName: '$(SigningEsrpConnectedServiceName)'
+ AppRegistrationClientId: '$(SigningAppRegistrationClientId)'
+ AppRegistrationTenantId: '$(SigningAppRegistrationTenantId)'
+ EsrpClientId: '$(SigningEsrpClientId)'
+ UseMSIAuthentication: true
+ FolderPath: '$(ob_outputDirectory)/wheels'
+ Pattern: '*.whl'
+ SessionTimeout: 60
+ CleanupTempStorage: 1
+ VerboseLogin: 1
+
+ # ESRP Code Signing (DISABLED - wheel files cannot be signed with SignTool)
+ # See compound-esrp-code-signing-step.yml for detailed explanation of why this doesn't work
+ # - ${{ if eq(parameters.signingEnabled, true) }}:
+ # - template: /OneBranchPipelines/steps/compound-esrp-code-signing-step.yml@self
+ # parameters:
+ # appRegistrationClientId: '$(SigningAppRegistrationClientId)'
+ # appRegistrationTenantId: '$(SigningAppRegistrationTenantId)'
+ # artifactType: 'whl'
+ # authAkvName: '$(SigningAuthAkvName)'
+ # authSignCertName: '$(SigningAuthSignCertName)'
+ # esrpClientId: '$(SigningEsrpClientId)'
+ # esrpConnectedServiceName: '$(SigningEsrpConnectedServiceName)'
+ # signPath: '$(ob_outputDirectory)/wheels'
diff --git a/OneBranchPipelines/stages/build-windows-single-stage.yml b/OneBranchPipelines/stages/build-windows-single-stage.yml
new file mode 100644
index 00000000..2cf1ef68
--- /dev/null
+++ b/OneBranchPipelines/stages/build-windows-single-stage.yml
@@ -0,0 +1,222 @@
+# Windows Single Configuration Stage Template
+# Builds Python wheel for a specific Python version and architecture
+parameters:
+ - name: stageName
+ type: string
+ - name: jobName
+ type: string
+ default: 'BuildWheel'
+ - name: pythonVersion
+ type: string
+ - name: shortPyVer
+ type: string
+ - name: architecture
+ type: string
+ - name: oneBranchType
+ type: string
+ default: 'Official'
+ - name: signingEnabled
+ type: boolean
+ default: true
+ - name: buildConfiguration
+ type: string
+ default: 'Release'
+ - name: publishSymbols
+ type: boolean
+ default: true
+
+stages:
+ - stage: ${{ parameters.stageName }}
+ displayName: 'Build Windows Python ${{ parameters.pythonVersion }} ${{ parameters.architecture }}'
+ jobs:
+ - job: ${{ parameters.jobName }}
+ displayName: 'Build Wheel - Python ${{ parameters.pythonVersion }} ${{ parameters.architecture }}'
+ pool:
+ type: windows
+ isCustom: true
+ name: Django-1ES-pool
+ vmImage: WIN22-SQL22
+ timeoutInMinutes: 120
+
+ variables:
+ ob_outputDirectory: '$(Build.ArtifactStagingDirectory)'
+ LinuxContainerImage: 'onebranch.azurecr.io/linux/ubuntu-2204:latest'
+ pythonVersion: ${{ parameters.pythonVersion }}
+ shortPyVer: ${{ parameters.shortPyVer }}
+ targetArch: ${{ parameters.architecture }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken) # Make System.AccessToken available to all steps in this job
+
+ steps:
+ - checkout: self
+ fetchDepth: 0
+
+ - task: UsePythonVersion@0
+ inputs:
+ versionSpec: '${{ parameters.pythonVersion }}'
+ architecture: 'x64'
+ addToPath: true
+ displayName: 'Use Python ${{ parameters.pythonVersion }} (${{ parameters.architecture }})'
+ continueOnError: false
+
+ - powershell: |
+ Write-Host "Python version:"
+ python --version
+ Write-Host "Python location:"
+ python -c "import sys; print(sys.executable)"
+ Write-Host "Architecture:"
+ python -c "import platform; print(platform.machine())"
+ displayName: 'Verify Python installation'
+
+ - powershell: |
+ $ErrorActionPreference = "Stop"
+ Write-Host "Installing Python dependencies..."
+ python -m pip install --upgrade pip
+ python -m pip install setuptools wheel pybind11 pytest pyodbc
+ Write-Host "Dependencies installed successfully"
+ displayName: 'Install Python dependencies'
+
+ - powershell: |
+ sqllocaldb create MSSQLLocalDB
+ sqllocaldb start MSSQLLocalDB
+ displayName: 'Start LocalDB instance'
+
+ - powershell: |
+ sqlcmd -S "(localdb)\MSSQLLocalDB" -Q "CREATE DATABASE TestDB"
+ sqlcmd -S "(localdb)\MSSQLLocalDB" -Q "CREATE LOGIN testuser WITH PASSWORD = '$env:DB_PASSWORD'"
+ sqlcmd -S "(localdb)\MSSQLLocalDB" -d TestDB -Q "CREATE USER testuser FOR LOGIN testuser"
+ sqlcmd -S "(localdb)\MSSQLLocalDB" -d TestDB -Q "ALTER ROLE db_owner ADD MEMBER testuser"
+ displayName: 'Setup database and user'
+ env:
+ DB_PASSWORD: $(DB_PASSWORD)
+
+ - task: DownloadPipelineArtifact@2
+ condition: eq(variables['targetArch'], 'arm64')
+ inputs:
+ buildType: 'specific'
+ project: '$(System.TeamProject)'
+ definition: 2162
+ buildVersionToDownload: 'latest'
+ artifactName: 'mssql-python-arm64-libs'
+ targetPath: '$(Build.SourcesDirectory)\mssql_python\pybind\python_libs\arm64'
+ displayName: 'Download ARM64 Python libs'
+
+ - script: |
+ echo "Python Version: $(pythonVersion)"
+ echo "Short Tag: $(shortPyVer)"
+ echo "Architecture: Host=${{ parameters.architecture }}, Target=$(targetArch)"
+
+ cd "$(Build.SourcesDirectory)\mssql_python\pybind"
+
+ REM Override lib path for ARM64
+ if "$(targetArch)"=="arm64" (
+ echo Using arm64-specific Python library...
+ set CUSTOM_PYTHON_LIB_DIR=$(Build.SourcesDirectory)\mssql_python\pybind\python_libs\arm64
+ )
+
+ call build.bat $(targetArch)
+ call keep_single_arch.bat $(targetArch)
+
+ cd ..\..
+ displayName: 'Build PYD for $(targetArch)'
+ continueOnError: false
+
+ - powershell: |
+ Write-Host "Running pytests to validate bindings"
+ if ("$(targetArch)" -eq "arm64") {
+ Write-Host "Skipping pytests on Windows ARM64"
+ } else {
+ python -m pytest -v
+ }
+ displayName: 'Run pytests'
+ env:
+ DB_CONNECTION_STRING: 'Server=(localdb)\MSSQLLocalDB;Database=TestDB;Uid=testuser;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes'
+
+ - task: CopyFiles@2
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)\mssql_python\pybind\build\$(targetArch)\py$(shortPyVer)\Release'
+ Contents: 'ddbc_bindings.cp$(shortPyVer)-*.pyd'
+ TargetFolder: '$(ob_outputDirectory)\bindings\windows'
+ displayName: 'Copy PYD files'
+
+ - task: CopyFiles@2
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)\mssql_python\pybind\build\$(targetArch)\py$(shortPyVer)\Release'
+ Contents: 'ddbc_bindings.cp$(shortPyVer)-*.pdb'
+ TargetFolder: '$(ob_outputDirectory)\symbols'
+ displayName: 'Copy PDB files'
+
+ # Copy files to ApiScan directories (for globalSdl scanning)
+ - task: CopyFiles@2
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)\mssql_python\pybind\build\$(targetArch)\py$(shortPyVer)\Release'
+ Contents: 'ddbc_bindings.cp$(shortPyVer)-*.pyd'
+ TargetFolder: '$(Build.SourcesDirectory)\apiScan\dlls\windows\py$(shortPyVer)\$(targetArch)'
+ displayName: 'Copy PYD to ApiScan directory'
+
+ - task: CopyFiles@2
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)\mssql_python\pybind\build\$(targetArch)\py$(shortPyVer)\Release'
+ Contents: 'ddbc_bindings.cp$(shortPyVer)-*.pdb'
+ TargetFolder: '$(Build.SourcesDirectory)\apiScan\pdbs\windows\py$(shortPyVer)\$(targetArch)'
+ displayName: 'Copy PDB to ApiScan directory'
+
+
+ - script: |
+ python -m pip install --upgrade pip wheel setuptools
+ set ARCHITECTURE=$(targetArch)
+ python setup.py bdist_wheel
+ displayName: 'Build wheel package'
+
+ - task: CopyFiles@2
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)\dist'
+ Contents: '*.whl'
+ TargetFolder: '$(ob_outputDirectory)\wheels'
+ displayName: 'Copy wheel files'
+
+ # Explicit publish with OneBranch-compliant artifact name
+ - task: PublishPipelineArtifact@1
+ displayName: 'Publish Windows Artifacts'
+ inputs:
+ targetPath: '$(ob_outputDirectory)'
+ artifact: 'drop_${{ parameters.stageName }}_${{ parameters.jobName }}'
+ publishLocation: 'pipeline'
+
+ # General malware scanning (Component Governance + OneBranch AntiMalware)
+ - template: /OneBranchPipelines/steps/malware-scanning-step.yml@self
+ parameters:
+ scanPath: '$(ob_outputDirectory)'
+ artifactType: 'dll'
+
+ # ESRP Malware scanning (when signing is enabled)
+ - ${{ if eq(parameters.signingEnabled, true) }}:
+ - task: EsrpMalwareScanning@5
+ displayName: 'ESRP MalwareScanning - Python Wheels'
+ inputs:
+ ConnectedServiceName: '$(SigningEsrpConnectedServiceName)'
+ AppRegistrationClientId: '$(SigningAppRegistrationClientId)'
+ AppRegistrationTenantId: '$(SigningAppRegistrationTenantId)'
+ EsrpClientId: '$(SigningEsrpClientId)'
+ UseMSIAuthentication: true
+ FolderPath: '$(ob_outputDirectory)/wheels'
+ Pattern: '*.whl'
+ SessionTimeout: 60
+ CleanupTempStorage: 1
+ VerboseLogin: 1
+
+ # ESRP Code Signing (DISABLED - wheel files cannot be signed with SignTool)
+ # See compound-esrp-code-signing-step.yml for detailed explanation of why this doesn't work
+ # - ${{ if eq(parameters.signingEnabled, true) }}:
+ # - template: /OneBranchPipelines/steps/compound-esrp-code-signing-step.yml@self
+ # parameters:
+ # appRegistrationClientId: '$(SigningAppRegistrationClientId)'
+ # appRegistrationTenantId: '$(SigningAppRegistrationTenantId)'
+ # artifactType: 'whl'
+ # authAkvName: '$(SigningAuthAkvName)'
+ # authSignCertName: '$(SigningAuthSignCertName)'
+ # esrpClientId: '$(SigningEsrpClientId)'
+ # esrpConnectedServiceName: '$(SigningEsrpConnectedServiceName)'
+ # signPath: '$(ob_outputDirectory)\wheels'
+
+ # Note: Symbol publishing moved to release pipeline
+ # Symbols are published as artifacts here and consumed in release pipeline
diff --git a/OneBranchPipelines/steps/compound-esrp-code-signing-step.yml b/OneBranchPipelines/steps/compound-esrp-code-signing-step.yml
new file mode 100644
index 00000000..62c9357f
--- /dev/null
+++ b/OneBranchPipelines/steps/compound-esrp-code-signing-step.yml
@@ -0,0 +1,210 @@
+# -----------------------------------------------------------------------------
+# ESRP Code Signing Step Template (DISABLED - Python wheels cannot be signed with SignTool)
+#
+# This template was originally designed to handle signing of binary artifacts using Enterprise Secure Release Process (ESRP).
+# However, we discovered that Python wheel (.whl) files cannot be signed using Windows SignTool because:
+#
+# 1. Python wheels are ZIP archive files, not PE format binaries
+# 2. Windows SignTool only supports PE format files (.exe, .dll, .sys, etc.)
+# 3. ZIP archives require different signing approaches (if supported at all)
+#
+# Error Messages Encountered:
+#
+# ESRP Error Log:
+# "SignTool Error: This file format cannot be signed because it is not recognized."
+#
+# Full SignTool Command that Failed:
+# sign /NPH /fd "SHA256" /f "..." /tr "..." /d "mssql-python" "...whl"
+#
+# Technical Details:
+# - Certificate CP-230012 loads successfully and authentication works correctly
+# - File upload to ESRP service works without issues
+# - The failure occurs when SignTool attempts to process the .whl file
+# - SignTool recognizes .whl as an unknown/unsupported format
+#
+# Alternative Approaches Considered:
+# 1. OneBranch signing (onebranch.pipeline.signing@1) - had authentication issues requiring interactive login
+# 2. Different ESRP operations - no ESRP operation exists for ZIP archive signing
+# 3. Signing individual files within wheels - would break wheel integrity and PyPI compatibility
+#
+# Conclusion:
+# Python wheels distributed to PyPI are typically unsigned. The package integrity is verified through
+# checksums and PyPI's own security mechanisms. Many popular Python packages on PyPI are not code-signed.
+#
+# This template is preserved for reference and potential future use if alternative signing approaches
+# are identified or if other file types need to be signed.
+#
+# Original Configuration Details:
+# CP-230012: "SHA256 Authenticode Standard Microsoft Corporation" certificate for external distribution
+# Operation: SigntoolSign (Windows SignTool for PE format binaries only)
+# Service Connection: Microsoft Release Management Internal
+#
+# Based on SqlClient ESRP signing implementation
+# COMMENTED OUT - All ESRP signing tasks are disabled due to SignTool incompatibility with wheel files
+# The code below is preserved for reference and potential future use with other file types
+# -----------------------------------------------------------------------------
+# parameters:
+# - name: appRegistrationClientId
+# type: string
+# displayName: 'App Registration Client ID'
+#
+# - name: appRegistrationTenantId
+# type: string
+# displayName: 'App Registration Tenant ID'
+#
+# - name: artifactType
+# type: string
+# displayName: 'Artifact type to sign'
+# values:
+# - 'dll' # For .pyd, .so, .dylib files (native binaries)
+# - 'whl' # For .whl files (Python wheels)
+#
+# - name: authAkvName
+# type: string
+# displayName: 'Azure Key Vault name'
+#
+# - name: authSignCertName
+# type: string
+# displayName: 'Signing certificate name'
+#
+# - name: esrpClientId
+# type: string
+# displayName: 'ESRP Client ID'
+#
+# - name: esrpConnectedServiceName
+# type: string
+# displayName: 'ESRP Connected Service Name'
+#
+# - name: signPath
+# type: string
+# displayName: 'Path containing files to sign'
+
+# steps:
+# # Sign native binary files (.pyd, .so, .dylib)
+# - ${{ if eq(parameters.artifactType, 'dll') }}:
+# - task: EsrpCodeSigning@5
+# displayName: 'ESRP CodeSigning - Native Binaries'
+# inputs:
+# ConnectedServiceName: '${{ parameters.esrpConnectedServiceName }}'
+# AppRegistrationClientId: '${{ parameters.appRegistrationClientId }}'
+# AppRegistrationTenantId: '${{ parameters.appRegistrationTenantId }}'
+# EsrpClientId: '${{ parameters.esrpClientId }}'
+# UseMSIAuthentication: true
+# AuthAKVName: '${{ parameters.authAkvName }}'
+# AuthSignCertName: '${{ parameters.authSignCertName }}'
+# FolderPath: '${{ parameters.signPath }}'
+# Pattern: '*.pyd,*.dll,*.so,*.dylib'
+# signConfigType: inlineSignParams
+# inlineOperation: |
+# [
+# {
+# "keyCode": "CP-230012",
+# "operationSetCode": "SigntoolSign",
+# "parameters": [
+# {
+# "parameterName": "OpusName",
+# "parameterValue": "mssql-python"
+# },
+# {
+# "parameterName": "OpusInfo",
+# "parameterValue": "http://www.microsoft.com"
+# },
+# {
+# "parameterName": "FileDigest",
+# "parameterValue": "/fd \"SHA256\""
+# },
+# {
+# "parameterName": "PageHash",
+# "parameterValue": "/NPH"
+# },
+# {
+# "parameterName": "TimeStamp",
+# "parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
+# }
+# ],
+# "toolName": "sign",
+# "toolVersion": "1.0"
+# },
+# {
+# "keyCode": "CP-230012",
+# "operationSetCode": "SigntoolVerify",
+# "parameters": [],
+# "toolName": "sign",
+# "toolVersion": "1.0"
+# }
+# ]
+#
+# # Sign Python wheel files (.whl)
+# - ${{ if eq(parameters.artifactType, 'whl') }}:
+# - task: EsrpCodeSigning@5
+# displayName: 'ESRP CodeSigning - Python Wheels'
+# inputs:
+# ConnectedServiceName: '${{ parameters.esrpConnectedServiceName }}'
+# AppRegistrationClientId: '${{ parameters.appRegistrationClientId }}'
+# AppRegistrationTenantId: '${{ parameters.appRegistrationTenantId }}'
+# EsrpClientId: '${{ parameters.esrpClientId }}'
+# UseMSIAuthentication: true
+# AuthAKVName: '${{ parameters.authAkvName }}'
+# AuthSignCertName: '${{ parameters.authSignCertName }}'
+# FolderPath: '${{ parameters.signPath }}'
+# Pattern: '*.whl'
+# signConfigType: inlineSignParams
+# inlineOperation: |
+# [
+# {
+# "keyCode": "CP-230012",
+# "operationSetCode": "SigntoolSign",
+# "parameters": [
+# {
+# "parameterName": "OpusName",
+# "parameterValue": "mssql-python"
+# },
+# {
+# "parameterName": "OpusInfo",
+# "parameterValue": "http://www.microsoft.com"
+# },
+# {
+# "parameterName": "FileDigest",
+# "parameterValue": "/fd \"SHA256\""
+# },
+# {
+# "parameterName": "PageHash",
+# "parameterValue": "/NPH"
+# },
+# {
+# "parameterName": "TimeStamp",
+# "parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
+# }
+# ],
+# "toolName": "sign",
+# "toolVersion": "1.0"
+# },
+# {
+# "keyCode": "CP-230012",
+# "operationSetCode": "SigntoolVerify",
+# "parameters": [],
+# "toolName": "sign",
+# "toolVersion": "1.0"
+# }
+# ]
+#
+# # List signed files (platform-specific)
+# - ${{ if eq(parameters.artifactType, 'dll') }}:
+# # Windows - use cmd syntax
+# - script: |
+# echo Signed files in: ${{ parameters.signPath }}
+# dir /s /b "${{ parameters.signPath }}\*.whl" "${{ parameters.signPath }}\*.pyd" "${{ parameters.signPath }}\*.dll" 2>nul
+# displayName: 'List signed files (Windows)'
+# condition: succeededOrFailed()
+#
+# - ${{ else }}:
+# # Linux/macOS - use bash syntax
+# - bash: |
+# echo "Signed files in: ${{ parameters.signPath }}"
+# if [ -d "${{ parameters.signPath }}" ]; then
+# find "${{ parameters.signPath }}" -type f \( -name "*.whl" -o -name "*.pyd" -o -name "*.dll" -o -name "*.so" -o -name "*.dylib" \) -ls
+# else
+# echo "Directory not found: ${{ parameters.signPath }}"
+# fi
+# displayName: 'List signed files (Linux/macOS)'
+# condition: succeededOrFailed()
diff --git a/OneBranchPipelines/steps/malware-scanning-step.yml b/OneBranchPipelines/steps/malware-scanning-step.yml
new file mode 100644
index 00000000..bbba5d88
--- /dev/null
+++ b/OneBranchPipelines/steps/malware-scanning-step.yml
@@ -0,0 +1,28 @@
+# Malware Scanning Step Template
+# Scans artifacts for malware before signing/publishing
+parameters:
+ - name: scanPath
+ type: string
+ displayName: 'Path to scan for malware'
+
+ - name: artifactType
+ type: string
+ displayName: 'Type of artifact (dll, pkg)'
+ values:
+ - 'dll'
+ - 'pkg'
+
+steps:
+ - task: ComponentGovernanceComponentDetection@0
+ displayName: 'Component Governance Detection'
+ inputs:
+ scanType: 'Register'
+ verbosity: 'Verbose'
+ alertWarningLevel: 'High'
+
+ # AntiMalware scanning (OneBranch will inject this automatically via globalSdl)
+ # This step is a placeholder for visibility
+ - script: |
+ echo "Malware scanning for ${{ parameters.artifactType }} files in ${{ parameters.scanPath }}"
+ echo "OneBranch AntiMalware scanning will be performed automatically"
+ displayName: 'Malware Scan Notification (${{ parameters.artifactType }})'
diff --git a/OneBranchPipelines/steps/symbol-publishing-step.yml b/OneBranchPipelines/steps/symbol-publishing-step.yml
new file mode 100644
index 00000000..479c1c33
--- /dev/null
+++ b/OneBranchPipelines/steps/symbol-publishing-step.yml
@@ -0,0 +1,209 @@
+# Symbol Publishing Step Template
+# Publishes PDB symbols to Azure DevOps Symbol Server and Microsoft Symbol Publishing Service
+parameters:
+ - name: SymbolsFolder
+ type: string
+ default: '$(ob_outputDirectory)\symbols'
+
+steps:
+ # Set AccountName for SqlClientDrivers organization (separate PowerShell task like JDBC)
+ - task: PowerShell@2
+ displayName: 'Set Symbol.AccountName to SqlClientDrivers'
+ inputs:
+ targetType: inline
+ # NOTE: we're setting PAT in this step since Pat:$(System.AccessToken) doesn't work in PublishSymbols@2 task directly
+ # Tried using env: parameter on PublishSymbols@2 but it didn't work
+ # This is a workaround to set it via script, and setting as a secret variable
+ script: |
+ Write-Host "##vso[task.setvariable variable=ArtifactServices.Symbol.AccountName;]SqlClientDrivers"
+ Write-Host "##vso[task.setvariable variable=ArtifactServices.Symbol.Pat;issecret=true;]$env:SYSTEM_ACCESSTOKEN"
+ # Verify System.AccessToken is available
+ if (-not $env:SYSTEM_ACCESSTOKEN) {
+ Write-Error "SYSTEM_ACCESSTOKEN is not available. Ensure 'Allow scripts to access the OAuth token' is enabled in the pipeline settings."
+ } else {
+ Write-Host "SYSTEM_ACCESSTOKEN is available and will be used for symbol publishing."
+ }
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+
+ - task: PublishSymbols@2
+ displayName: 'Push Symbols to SqlClientDrivers ADO Organization'
+ inputs:
+ SymbolsFolder: '${{ parameters.SymbolsFolder }}'
+ SearchPattern: '**/*.pdb'
+ IndexSources: false
+ SymbolServerType: TeamServices
+ SymbolsMaximumWaitTime: 10
+ SymbolsProduct: mssql-python
+ SymbolsVersion: $(Build.BuildId)
+
+ # Publish to Microsoft Symbol Publishing Service (External)
+ # This step finds the request name created by PublishSymbols@2 task above and publishes to internal/public servers
+ # The PublishSymbols@2 task uploads symbols and creates a request; this step marks it for publishing
+ #
+ # PREREQUISITES (Critical for avoiding 403 Forbidden errors):
+ # 1. Project must be registered with Symbol team via IcM incident (ICM 696470276 for mssql-python)
+ # 2. Service principal/identity used by azureSubscription must be added as Reader AND Publisher
+ # - Symbol team must explicitly grant this identity access to your project
+ # - 403 errors indicate the identity hasn't been added or wrong identity is being used
+ # 3. Verify identity matches: az account get-access-token will use the identity from azureSubscription
+ #
+ # Reference: https://www.osgwiki.com/wiki/Symbols_Publishing_Pipeline_to_SymWeb_and_MSDL#Step_3:_Project_Setup
+ - task: AzureCLI@2
+ displayName: 'Publish symbols to Microsoft Symbol Publishing Service'
+ condition: succeeded()
+ env:
+ SymbolServer: '$(SymbolServer)'
+ SymbolTokenUri: '$(SymbolTokenUri)'
+ inputs:
+ azureSubscription: 'SymbolsPublishing-msodbcsql-mssql-python'
+ scriptType: ps
+ scriptLocation: inlineScript
+ inlineScript: |
+ $symbolServer = $env:SymbolServer
+ $tokenUri = $env:SymbolTokenUri
+ $projectName = "mssql-python"
+
+ # Get the access token for the symbol publishing service
+ # This uses the identity from azureSubscription
+ # CRITICAL: The identity must be registered as Reader AND Publisher for the project
+ # Otherwise you'll get 403 Forbidden errors when calling the Symbol Publishing Service API
+ $symbolPublishingToken = az account get-access-token --resource $tokenUri --query accessToken -o tsv
+ echo "> 1.Symbol publishing token acquired."
+
+ # CRITICAL: We search build logs to find the auto-generated request name from PublishSymbols@2
+ # Two implementation patterns exist:
+ # 1. JDBC Pattern (used here): PublishSymbols@2 auto-generates request name → search logs → publish
+ # 2. SqlClient Pattern: Pass explicit symbolsArtifactName parameter → use same name → publish
+ # We use JDBC pattern because it's more flexible and doesn't require parameter coordination
+
+ # KEY LEARNING: Must use $(System.CollectionUri) for correct API URL construction
+ # $(System.CollectionUri) = full org URL like "https://dev.azure.com/SqlClientDrivers/"
+ # $(System.TeamProject) = only project name like "mssql-python"
+ # Previous error: Used "https://dev.azure.com/$(System.TeamProject)" which resolved to
+ # "https://dev.azure.com/mssql-python" (missing organization) → 404 error
+ echo "Searching for request name created by PublishSymbols@2 task..."
+ $logList = Invoke-RestMethod -Uri "$(System.CollectionUri)$(System.TeamProject)/_apis/build/builds/$(Build.BuildId)/logs?api-version=7.1" -Method GET -Headers @{ Authorization = "Bearer $(System.AccessToken)" } -ContentType "application/json"
+
+ # KEY LEARNING: Build API returns logs in the .value property, not .logs
+ # Previous error: Used $logList.logs → property not found
+ # Azure DevOps Build API schema: { "value": [ { "id": 1, ... }, ... ] }
+ $requestName = $null
+ $logList.value | ForEach-Object {
+ $id = $_.id
+ $log = Invoke-RestMethod -Uri "$(System.CollectionUri)$(System.TeamProject)/_apis/build/builds/$(Build.BuildId)/logs/$id" -Method GET -Headers @{ Authorization = "Bearer $(System.AccessToken)" } -ContentType "application/json"
+
+ echo $log > log.txt
+ # PublishSymbols@2 creates a request with pattern like: Request 'mssql-python/{branch}/{date}.{build}/{buildId}/{guid}'
+ # Example: Request 'mssql-python/official-release/25290.7-release/127537/23bc7689-7bae-4d13-8772-ae70c50b72df'
+ $request = Select-String -Path log.txt -Pattern "Request '.*'" -ErrorAction SilentlyContinue
+
+ if ($request -and $request -match "'mssql-python\/.*'") {
+ $requestName = (-Split $Matches[0])[0].Replace("'","")
+ echo "Found request name: $requestName"
+ }
+ }
+
+ if (-not $requestName) {
+ echo "##[error]Could not find request name in build logs. The PublishSymbols@2 task may have failed or not created a request."
+ exit 1
+ }
+
+ echo "> 2.Request name found from PublishSymbols@2 task."
+
+ # Register the request name with Symbol Publishing Service
+ # This is an idempotent operation - if already registered, API returns success
+ # KEY LEARNING: Use ConvertTo-Json for proper JSON formatting (not manual string construction)
+ # This ensures correct boolean values and escaping
+ echo "Registering the request name ..."
+ $requestNameRegistration = @{ requestName = $requestName }
+ $requestNameRegistrationBody = $requestNameRegistration | ConvertTo-Json -Compress
+ try {
+ Invoke-RestMethod -Method POST -Uri "https://$symbolServer.trafficmanager.net/projects/$projectName/requests" -Headers @{ Authorization = "Bearer $symbolPublishingToken" } -ContentType "application/json" -Body $requestNameRegistrationBody
+ echo "> 3.Registration of request name succeeded."
+ } catch {
+ echo "Registration may have already existed (this is okay): $($_.Exception.Message)"
+ }
+
+ # Publish the symbols to internal and public servers
+ # KEY LEARNING: This API call is asynchronous - it submits the request but doesn't wait for completion
+ # We need to poll the status endpoint (below) to confirm when publishing finishes
+ # Status codes: 0=NotRequested, 1=Submitted, 2=Processing, 3=Completed
+ # Result codes: 0=Pending, 1=Succeeded, 2=Failed, 3=Cancelled
+ echo "Publishing the symbols to internal and public servers..."
+ $publishSymbols = @{
+ publishToInternalServer = $true
+ publishToPublicServer = $true
+ }
+ $publishSymbolsBody = $publishSymbols | ConvertTo-Json -Compress
+ echo "Publishing symbols request body: $publishSymbolsBody"
+
+ try {
+ $response = Invoke-RestMethod -Method POST -Uri "https://$symbolServer.trafficmanager.net/projects/$projectName/requests/$requestName" -Headers @{ Authorization = "Bearer $symbolPublishingToken" } -ContentType "application/json" -Body $publishSymbolsBody
+ echo "> 4.Request to publish symbols succeeded."
+ echo "Response: $($response | ConvertTo-Json)"
+ } catch {
+ echo "##[error]Failed to publish symbols. Status Code: $($_.Exception.Response.StatusCode.value__)"
+ echo "##[error]Error Message: $($_.Exception.Message)"
+ if ($_.ErrorDetails.Message) {
+ echo "##[error]Error Details: $($_.ErrorDetails.Message)"
+ }
+ throw
+ }
+
+ # NOTE: publish success is already reported above (step 4); proceed to status polling.
+
+ # Poll for publishing status until complete or timeout
+ # KEY LEARNING: Publishing is asynchronous - need to poll until Status=3 (Completed)
+ # Both internal and public servers must complete before we can confirm success
+ # Timeout after 5 minutes (30 attempts × 10 seconds) as a safety measure
+ echo "> 5.Checking the status of the request ..."
+ $maxAttempts = 30 # 30 attempts = ~5 minutes with 10 second intervals
+ $attemptCount = 0
+ $publishingComplete = $false
+
+ while (-not $publishingComplete -and $attemptCount -lt $maxAttempts) {
+ $attemptCount++
+ $status = Invoke-RestMethod -Method GET -Uri "https://$symbolServer.trafficmanager.net/projects/$projectName/requests/$requestName" -Headers @{ Authorization = "Bearer $symbolPublishingToken" } -ContentType "application/json"
+
+ echo "Attempt $attemptCount/$maxAttempts - Status Check:"
+ echo " Internal Server: Status=$($status.publishToInternalServerStatus), Result=$($status.publishToInternalServerResult)"
+ echo " Public Server: Status=$($status.publishToPublicServerStatus), Result=$($status.publishToPublicServerResult)"
+
+ # Wait for both servers to reach Status=3 (Completed)
+ # KEY LEARNING: Empty file arrays (filesBlockedFromPublicServer, filesPublishedAsPrivateSymbolsToPublicServer)
+ # are normal and expected - they populate only when there are blocked/private files
+ $internalDone = $status.publishToInternalServerStatus -eq 3
+ $publicDone = $status.publishToPublicServerStatus -eq 3
+
+ if ($internalDone -and $publicDone) {
+ $publishingComplete = $true
+ echo ""
+ echo "Publishing completed!"
+ echo " Internal Result: $($status.publishToInternalServerResult) (1=Success, 2=Failed)"
+ echo " Public Result: $($status.publishToPublicServerResult) (1=Success, 2=Failed)"
+
+ # Check for failures and report with detailed messages
+ if ($status.publishToInternalServerResult -eq 2) {
+ echo "##[warning]Internal server publishing failed: $($status.publishToInternalServerFailureMessage)"
+ }
+ if ($status.publishToPublicServerResult -eq 2) {
+ echo "##[warning]Public server publishing failed: $($status.publishToPublicServerFailureMessage)"
+ }
+
+ # Output final status for debugging
+ echo ""
+ echo "Final Status:"
+ $status | ConvertTo-Json
+ } else {
+ if ($attemptCount -lt $maxAttempts) {
+ echo " Still processing... waiting 10 seconds before next check"
+ Start-Sleep -Seconds 10
+ }
+ }
+ }
+
+ if (-not $publishingComplete) {
+ echo "##[warning]Publishing status check timed out after $maxAttempts attempts. Symbols may still be processing."
+ echo "You can check status manually at: https://$symbolServer.trafficmanager.net/projects/$projectName/requests/$requestName"
+ }
diff --git a/OneBranchPipelines/variables/build-variables.yml b/OneBranchPipelines/variables/build-variables.yml
new file mode 100644
index 00000000..d1d41f84
--- /dev/null
+++ b/OneBranchPipelines/variables/build-variables.yml
@@ -0,0 +1,35 @@
+# Build-specific variables
+variables:
+ # Build output directories
+ - name: DIST_PATH
+ value: '$(Build.SourcesDirectory)/dist'
+
+ - name: BINDINGS_PATH
+ value: '$(Build.SourcesDirectory)/mssql_python/pybind'
+
+ # Artifact output paths for OneBranch
+ - name: WHEELS_OUTPUT_PATH
+ value: '$(ob_outputDirectory)/wheels'
+
+ - name: BINDINGS_OUTPUT_PATH
+ value: '$(ob_outputDirectory)/bindings'
+
+ - name: SYMBOLS_OUTPUT_PATH
+ value: '$(ob_outputDirectory)/symbols'
+
+ # Build tools
+ - name: CMAKE_VERSION
+ value: 'latest'
+
+ - name: PYBIND11_VERSION
+ value: 'latest'
+
+ # Architecture support
+ - name: WINDOWS_ARCHITECTURES
+ value: 'x64,arm64'
+
+ - name: MACOS_ARCHITECTURES
+ value: 'universal2'
+
+ - name: LINUX_ARCHITECTURES
+ value: 'x86_64,aarch64'
diff --git a/OneBranchPipelines/variables/common-variables.yml b/OneBranchPipelines/variables/common-variables.yml
new file mode 100644
index 00000000..3597f419
--- /dev/null
+++ b/OneBranchPipelines/variables/common-variables.yml
@@ -0,0 +1,25 @@
+# Common variables used across all pipelines
+variables:
+ # Repository root path
+ - name: REPO_ROOT
+ value: $(Build.SourcesDirectory)
+ readonly: true
+
+ # Artifact staging paths
+ - name: ARTIFACT_PATH
+ value: $(Build.ArtifactStagingDirectory)
+ readonly: true
+
+ # Build configuration
+ - name: BUILD_CONFIGURATION
+ value: 'Release'
+
+ # Python versions to build
+ - name: PYTHON_VERSIONS
+ value: '3.10,3.11,3.12,3.13'
+
+ # Package name
+ - name: PACKAGE_NAME
+ value: 'mssql-python'
+ readonly: true
+
diff --git a/OneBranchPipelines/variables/onebranch-variables.yml b/OneBranchPipelines/variables/onebranch-variables.yml
new file mode 100644
index 00000000..71f31037
--- /dev/null
+++ b/OneBranchPipelines/variables/onebranch-variables.yml
@@ -0,0 +1,22 @@
+# OneBranch-specific variables
+variables:
+ # OneBranch output directory for automatic artifact publishing
+ # All artifacts placed here are automatically published by OneBranch
+ - name: ob_outputDirectory
+ value: '$(ARTIFACT_PATH)'
+
+ # OneBranch SDL configuration
+ - name: ob_sdl_enabled
+ value: true
+
+ # OneBranch symbol publishing
+ - name: ob_symbolsPublishing_enabled
+ value: true
+
+ # OneBranch TSA (Threat and Security Assessment) — intended for Official builds only; note this shared file sets it unconditionally
+ - name: ob_tsa_enabled
+ value: true
+
+ # Windows host version for OneBranch
+ - name: ob_windows_host_version
+ value: '2022'
diff --git a/OneBranchPipelines/variables/signing-variables.yml b/OneBranchPipelines/variables/signing-variables.yml
new file mode 100644
index 00000000..88c58e9f
--- /dev/null
+++ b/OneBranchPipelines/variables/signing-variables.yml
@@ -0,0 +1,32 @@
+# ESRP Code Signing Variables
+# These variables map from the 'ESRP Federated Creds (AME)' variable group
+# to the naming convention expected by OneBranch ESRP signing tasks
+# Required variable group: 'ESRP Federated Creds (AME)'
+variables:
+ # Map ESRP variable group names to OneBranch signing variable names
+ # Note: The source variable group uses different naming (without 'Signing' prefix)
+
+ # ESRP App Registration for authentication
+ - name: SigningAppRegistrationClientId
+ value: $(EsrpClientId) # Maps from EsrpClientId in variable group
+
+ - name: SigningAppRegistrationTenantId
+ value: $(DomainTenantId) # Maps from DomainTenantId in variable group
+
+ # Azure Key Vault for signing certificates
+ - name: SigningAuthAkvName
+ value: $(AuthAKVName) # Maps from AuthAKVName in variable group
+
+ - name: SigningAuthSignCertName
+ value: $(AuthSignCertName) # Maps from AuthSignCertName in variable group
+
+ # ESRP client configuration
+ - name: SigningEsrpClientId
+ value: $(EsrpClientId) # Maps from EsrpClientId in variable group
+
+ - name: SigningEsrpConnectedServiceName
+ value: $(ESRPConnectedServiceName) # Maps from ESRPConnectedServiceName in variable group
+
+ # Signing operation codes (for reference - actual operations defined in step template)
+ # Native binary files (.pyd, .so, .dylib) use: SigntoolSign with CP-230012
+ # Python wheel files (.whl) use: NuGetSign with CP-401405
diff --git a/OneBranchPipelines/variables/symbol-variables.yml b/OneBranchPipelines/variables/symbol-variables.yml
new file mode 100644
index 00000000..8946e80e
--- /dev/null
+++ b/OneBranchPipelines/variables/symbol-variables.yml
@@ -0,0 +1,18 @@
+# Symbol Publishing Variables
+# These variables define debug symbol (.pdb) staging paths for ApiScan; symbol publishing server settings come from a variable group
+variables:
+ # Symbol paths for ApiScan
+ # Must use Build.SourcesDirectory (not ob_outputDirectory) so files persist for globalSdl
+ # Files are copied here during build stages, before ApiScan runs
+ # CRITICAL: Must use backslashes to match Build.SourcesDirectory's Windows path format
+ # When Build.SourcesDirectory resolves to D:\a\_work\1\s, we append \apiScan\dlls
+ - name: apiScanDllPath
+ value: '$(Build.SourcesDirectory)\apiScan\dlls'
+
+ - name: apiScanPdbPath
+ value: '$(Build.SourcesDirectory)\apiScan\pdbs'
+
+ # Symbol server variables come from 'Symbols Publishing' variable group:
+ # - SymbolServer: Symbol publishing server hostname
+ # - SymbolTokenUri: Token URI for symbol publishing service authentication
+
diff --git a/es-metadata.yml b/es-metadata.yml
new file mode 100644
index 00000000..d703279d
--- /dev/null
+++ b/es-metadata.yml
@@ -0,0 +1,8 @@
+schemaVersion: 0.0.1
+isProduction: true
+accountableOwners:
+ service: ae66a2ba-2c8a-4e77-8323-305cfad11f0e
+routing:
+ defaultAreaPath:
+ org: sqlclientdrivers
+ path: mssql-python
diff --git a/tests/test_003_connection.py b/tests/test_003_connection.py
index d7c62ef8..8db506bc 100644
--- a/tests/test_003_connection.py
+++ b/tests/test_003_connection.py
@@ -21,6 +21,7 @@
from mssql_python.exceptions import InterfaceError, ProgrammingError, DatabaseError
import mssql_python
+import sys
import pytest
import time
from mssql_python import connect, Connection, SQL_CHAR, SQL_WCHAR
@@ -7823,7 +7824,6 @@ def test_set_attr_current_catalog_after_connect(db_connection, conn_str):
# Skip this test for Azure SQL Database - it doesn't support changing database after connection
if is_azure_sql_connection(conn_str):
pytest.skip("Skipping for Azure SQL - SQL_ATTR_CURRENT_CATALOG not supported after connection")
-
# Get current database name
cursor = db_connection.cursor()
cursor.execute("SELECT DB_NAME()")
diff --git a/tests/test_010_connection_string_parser.py b/tests/test_010_connection_string_parser.py
index 293f0a59..4a90e2fc 100644
--- a/tests/test_010_connection_string_parser.py
+++ b/tests/test_010_connection_string_parser.py
@@ -126,13 +126,13 @@ def test_parse_special_chars_in_simple_value(self):
def test_parse_complex_connection_string(self):
"""Test parsing a complex realistic connection string."""
parser = _ConnectionStringParser()
- conn_str = "Server=tcp:server.database.windows.net,1433;Database=mydb;UID=user@server;PWD={p@ss;w}}rd};Encrypt=yes"
+ conn_str = "Server=tcp:server.database.windows.net,1433;Database=mydb;UID=user@server;PWD={TestP@ss;w}}rd};Encrypt=yes"
result = parser._parse(conn_str)
assert result == {
'server': 'tcp:server.database.windows.net,1433',
'database': 'mydb',
'uid': 'user@server',
- 'pwd': 'p@ss;w}rd', # }} escapes to single }
+ 'pwd': 'TestP@ss;w}rd', # }} escapes to single }
'encrypt': 'yes'
}
diff --git a/tests/test_012_connection_string_integration.py b/tests/test_012_connection_string_integration.py
index 32b27f0f..21c5ef8f 100644
--- a/tests/test_012_connection_string_integration.py
+++ b/tests/test_012_connection_string_integration.py
@@ -143,7 +143,7 @@ def test_parse_filter_build_complex_realistic(self):
# Parse
parser = _ConnectionStringParser()
# Note: Connection Timeout is not in the restricted allowlist
- conn_str = "Server=tcp:server.database.windows.net,1433;Database=mydb;UID=user@server;PWD={P@ss;w}}rd};Encrypt=yes;TrustServerCertificate=no"
+ conn_str = "Server=tcp:server.database.windows.net,1433;Database=mydb;UID=user@server;PWD={TestP@ss;w}}rd};Encrypt=yes;TrustServerCertificate=no"
parsed = parser._parse(conn_str)
# Filter
@@ -160,7 +160,7 @@ def test_parse_filter_build_complex_realistic(self):
assert 'Server=tcp:server.database.windows.net,1433' in result
assert 'Database=mydb' in result
assert 'UID=user@server' in result # UID not Uid (canonical form)
- assert 'PWD={P@ss;w}}rd}' in result
+ assert 'PWD={TestP@ss;w}}rd}' in result
assert 'Encrypt=yes' in result
assert 'TrustServerCertificate=no' in result
# Connection Timeout not in result (filtered out)