From fc6e91fbed66ccff43874df6b463b061ee706be7 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 23 Apr 2020 18:01:00 +0100 Subject: [PATCH 001/155] (broken) Set up two GitLab VMs, "internal" and "external", for code ingress, within cw20/Test A sandbox Progress during call on 2020-04-23 Co-authored-by: jack89roberts Co-authored-by: jemrobinson Co-authored-by: nbarlowATI --- deployment/common/Configuration.psm1 | 28 +++++-- .../setup/Setup_SRE_WebApp_Servers.ps1 | 74 ++++++++++++++----- .../full/sre_mortestsandbox_full_config.json | 28 +++++-- 3 files changed, 103 insertions(+), 27 deletions(-) diff --git a/deployment/common/Configuration.psm1 b/deployment/common/Configuration.psm1 index 2b9ce476de..a2958f8333 100644 --- a/deployment/common/Configuration.psm1 +++ b/deployment/common/Configuration.psm1 @@ -340,12 +340,20 @@ function Add-SreConfig { prefix = "${sreBasePrefix}.$([int]$sreThirdOctet + 3)" nsg = "dbingress" } + airlock = [ordered]@{ + name = "AirlockSubnet" + prefix = "${sreBasePrefix}.$([int]$sreThirdOctet + 4)" + nsg = "airlock" + } } nsg = [ordered]@{ data = [ordered]@{} dbingress = [ordered]@{ name = "NSG_SRE_$($config.sre.id)_DB_INGRESS".ToUpper() } + airlock = [ordered]@{ + name = "NSG_SRE_$($config.sre.id)_AIRLOCK".ToUpper() + } } } # Construct the CIDR for each subnet based on the prefix @@ -353,6 +361,7 @@ function Add-SreConfig { $config.sre.network.subnets.rds.cidr = "$($config.sre.network.subnets.rds.prefix).0/24" $config.sre.network.subnets.data.cidr = "$($config.sre.network.subnets.data.prefix).0/24" $config.sre.network.subnets.dbingress.cidr = "$($config.sre.network.subnets.dbingress.prefix).0/24" + $config.sre.network.subnets.airlock.cidr = "$($config.sre.network.subnets.airlock.prefix).0/24" # --- Storage config -- $storageRg = "RG_SRE_ARTIFACTS" @@ -497,17 +506,26 @@ function Add-SreConfig { rg = "RG_SRE_WEBAPPS" nsg = "NSG_SRE_$($config.sre.id)_WEBAPPS".ToUpper() gitlab = [ordered]@{ - vmName = 
"GITLAB-SRE-$($config.sre.id)".ToUpper() - vmSize = "Standard_D2s_v3" + internal = [ordered]@{ + vmName = "GITLAB-INTERNAL-SRE-$($config.sre.id)".ToUpper() + vmSize = "Standard_D2s_v3" + } + external = [ordered]@{ + vmName = "GITLAB-EXTERNAL-SRE-$($config.sre.id)".ToUpper() + vmSize = "Standard_D2s_v3" + } } hackmd = [ordered]@{ vmName = "HACKMD-SRE-$($config.sre.id)".ToUpper() vmSize = "Standard_D2s_v3" } } - $config.sre.webapps.gitlab.hostname = $config.sre.webapps.gitlab.vmName - $config.sre.webapps.gitlab.fqdn = "$($config.sre.webapps.gitlab.hostname).$($config.shm.domain.fqdn)" - $config.sre.webapps.gitlab.ip = "$($config.sre.network.subnets.data.prefix).151" + $config.sre.webapps.gitlab.internal.hostname = $config.sre.webapps.gitlab.internal.vmName + $config.sre.webapps.gitlab.internal.fqdn = "$($config.sre.webapps.gitlab.internal.hostname).$($config.shm.domain.fqdn)" + $config.sre.webapps.gitlab.internal.ip = "$($config.sre.network.subnets.data.prefix).151" + $config.sre.webapps.gitlab.external.hostname = $config.sre.webapps.gitlab.external.vmName + $config.sre.webapps.gitlab.external.fqdn = "$($config.sre.webapps.gitlab.external.hostname).$($config.shm.domain.fqdn)" + $config.sre.webapps.gitlab.external.ip = "$($config.sre.network.subnets.airlock.prefix).151" $config.sre.webapps.hackmd.hostname = $config.sre.webapps.hackmd.vmName $config.sre.webapps.hackmd.fqdn = "$($config.sre.webapps.hackmd.hostname).$($config.shm.domain.fqdn)" $config.sre.webapps.hackmd.ip = "$($config.sre.network.subnets.data.prefix).152" diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 5358425367..cb6d362b26 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -31,8 +31,8 @@ $hackmdLdapPassword = Resolve-KeyVaultSecret -VaultName 
$config.sre.keyVault.nam # Set up the NSG for the webapps # ------------------------------ -$nsg = Deploy-NetworkSecurityGroup -Name $config.sre.webapps.nsg -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsg ` +$nsgGitlabInternal = Deploy-NetworkSecurityGroup -Name $config.sre.webapps.nsg -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location +Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgGitlabInternal ` -Name "OutboundDenyInternet" ` -Description "Outbound deny internet" ` -Priority 4000 ` @@ -40,11 +40,22 @@ Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsg ` -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` -DestinationAddressPrefix Internet -DestinationPortRange * +$nsgGitlabExternal = Deploy-NetworkSecurityGroup -Name $config.sre.network.nsg.airlock.name -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location + + +# Check that VNET and subnet exist +# -------------------------------- + +$vnet = Deploy-VirtualNetwork -Name $config.sre.network.vnet.Name -ResourceGroupName $config.sre.network.vnet.rg -AddressPrefix $config.sre.network.vnet.cidr -Location $config.sre.location +$subnet = Deploy-Subnet -Name $config.sre.network.subnets.airlock.Name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.airlock.cidr + +Set-SubnetNetworkSecurityGroup -Subnet $subnet -NetworkSecurityGroup $nsgGitlabExternal -VirtualNetwork $vnet + # Expand GitLab cloudinit # ----------------------- $shmDcFqdn = ($config.shm.dc.hostname + "." + $config.shm.domain.fqdn) -$gitlabFqdn = $config.sre.webapps.gitlab.hostname + "." + $config.sre.domain.fqdn +$gitlabFqdn = $config.sre.webapps.gitlab.internal.hostname + "." 
+ $config.sre.domain.fqdn $gitlabLdapUserDn = "CN=" + $config.sre.users.ldap.gitlab.name + "," + $config.shm.domain.serviceOuPath $gitlabUserFilter = "(&(objectClass=user)(memberOf=CN=" + $config.sre.domain.securityGroups.researchUsers.name + "," + $config.shm.domain.securityOuPath + "))" $gitlabCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab.template.yaml" | Get-Item | Get-Content -Raw @@ -53,8 +64,8 @@ $gitlabCloudInit = $gitlabCloudInitTemplate.Replace('', $shmDcFq Replace('',$gitlabLdapPassword). Replace('',$config.shm.domain.userOuPath). Replace('',$gitlabUserFilter). - Replace('',$config.sre.webapps.gitlab.ip). - Replace('',$config.sre.webapps.gitlab.hostname). + Replace('',$config.sre.webapps.gitlab.internal.ip). + Replace('',$config.sre.webapps.gitlab.internal.hostname). Replace('',$gitlabFqdn). Replace('',$gitlabRootPassword). Replace('',$config.shm.domain.fqdn) @@ -87,6 +98,36 @@ $hackmdCloudInitEncoded = [System.Convert]::ToBase64String([System.Text.Encoding $_ = Deploy-ResourceGroup -Name $config.sre.webapps.rg -Location $config.sre.location +# Deploy NIC and data disks for gitlab.external +# --------------------------------------------- + +$vmName = $config.sre.webapps.gitlab.external.vmName +$vmIpAddress = $config.sre.webapps.gitlab.external.ip +$vmNic = Deploy-VirtualMachineNIC -Name "$vmName-NIC" -ResourceGroupName $config.sre.webapps.rg -Subnet $subnet -PrivateIpAddress $vmIpAddress -Location $config.sre.location + + +# Deploy the VM +# ------------- + +$bootDiagnosticsAccount = Deploy-StorageAccount -Name $config.sre.storage.bootdiagnostics.accountName -ResourceGroupName $config.sre.storage.bootdiagnostics.rg -Location $config.sre.location +$cloudInitYaml = "" + +$params = @{ + Name = $vmName + Size = $config.sre.webapps.gitlab.external.vmSize + AdminPassword = $sreAdminPassword + AdminUsername = $sreAdminUsername + BootDiagnosticsAccount = $bootDiagnosticsAccount + CloudInitYaml = $cloudInitYaml + location = 
$config.sre.location + NicId = $vmNic.Id + OsDiskType = "Standard_LRS" + ResourceGroupName = $config.sre.webapps.rg + ImageSku = "18.04-LTS" +} +$_ = Deploy-UbuntuVirtualMachine @params + + # Deploy GitLab/HackMD VMs from template # -------------------------------------- Add-LogMessage -Level Info "Deploying GitLab/HackMD VMs from template..." @@ -95,9 +136,9 @@ $params = @{ Administrator_User = $sreAdminUsername BootDiagnostics_Account_Name = $config.sre.storage.bootdiagnostics.accountName GitLab_Cloud_Init = $gitlabCloudInitEncoded - GitLab_IP_Address = $config.sre.webapps.gitlab.ip - GitLab_Server_Name = $config.sre.webapps.gitlab.vmName - GitLab_VM_Size = $config.sre.webapps.gitlab.vmSize + GitLab_IP_Address = $config.sre.webapps.gitlab.internal.ip + GitLab_Server_Name = $config.sre.webapps.gitlab.internal.vmName + GitLab_VM_Size = $config.sre.webapps.gitlab.internal.vmSize HackMD_Cloud_Init = $hackmdCloudInitEncoded HackMD_IP_Address = $config.sre.webapps.hackmd.ip HackMD_Server_Name = $config.sre.webapps.hackmd.vmName @@ -113,32 +154,31 @@ Deploy-ArmTemplate -TemplatePath (Join-Path $PSScriptRoot ".." "arm_templates" " # ----------------------------------------------- Add-LogMessage -Level Info "Waiting for cloud-init provisioning to finish (this will take 5+ minutes)..." 
$progress = 0 -$gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code +$gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.internal.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code $hackmdStatuses = (Get-AzVM -Name $config.sre.webapps.hackmd.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code while (-Not ($gitlabStatuses.Contains("ProvisioningState/succeeded") -and $gitlabStatuses.Contains("PowerState/stopped") -and $hackmdStatuses.Contains("ProvisioningState/succeeded") -and $hackmdStatuses.Contains("PowerState/stopped"))) { $progress = [math]::min(100, $progress + 1) - $gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code + $gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.internal.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code $hackmdStatuses = (Get-AzVM -Name $config.sre.webapps.hackmd.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code - Write-Progress -Activity "Deployment status:" -Status "GitLab [$($gitlabStatuses[0]) $($gitlabStatuses[1])], HackMD [$($hackmdStatuses[0]) $($hackmdStatuses[1])]" -PercentComplete $progress + Write-Progress -Activity "Deployment status:" -Status "GitLab Internal [$($gitlabStatuses[0]) $($gitlabStatuses[1])], HackMD [$($hackmdStatuses[0]) $($hackmdStatuses[1])]" -PercentComplete $progress Start-Sleep 10 } - # While webapp servers are off, ensure they are bound to correct NSG # ------------------------------------------------------------------ Add-LogMessage -Level Info "Ensure webapp servers and compute VMs are bound to correct NSG..." 
-foreach ($vmName in ($config.sre.webapps.hackmd.vmName, $config.sre.webapps.gitlab.vmName)) { - Add-VmToNSG -VMName $vmName -NSGName $nsg.Name +foreach ($vmName in ($config.sre.webapps.hackmd.vmName, $config.sre.webapps.gitlab.internal.vmName)) { + Add-VmToNSG -VMName $vmName -NSGName $nsgGitlabInternal.Name } Start-Sleep -Seconds 30 -Add-LogMessage -Level Info "Summary: NICs associated with '$($nsg.Name)' NSG" -@($nsg.NetworkInterfaces) | ForEach-Object { Add-LogMessage -Level Info "=> $($_.Id.Split('/')[-1])" } +Add-LogMessage -Level Info "Summary: NICs associated with '$($nsgGitlabInternal.Name)' NSG" +@($nsgGitlabInternal.NetworkInterfaces) | ForEach-Object { Add-LogMessage -Level Info "=> $($_.Id.Split('/')[-1])" } # Finally, reboot the webapp servers # ---------------------------------- -foreach ($nameVMNameParamsPair in (("HackMD", $config.sre.webapps.hackmd.vmName), ("GitLab", $config.sre.webapps.gitlab.vmName))) { +foreach ($nameVMNameParamsPair in (("HackMD", $config.sre.webapps.hackmd.vmName), ("GitLab", $config.sre.webapps.gitlab.internal.vmName))) { $name, $vmName = $nameVMNameParamsPair Add-LogMessage -Level Info "Rebooting the $name VM: '$vmName'" Enable-AzVM -Name $vmName -ResourceGroupName $config.sre.webapps.rg diff --git a/environment_configs/full/sre_mortestsandbox_full_config.json b/environment_configs/full/sre_mortestsandbox_full_config.json index 49036abf96..96fddead66 100644 --- a/environment_configs/full/sre_mortestsandbox_full_config.json +++ b/environment_configs/full/sre_mortestsandbox_full_config.json @@ -182,12 +182,21 @@ "prefix": "10.150.11", "nsg": "dbingress", "cidr": "10.150.11.0/24" + }, + "airlock": { + "name": "AirlockSubnet", + "prefix": "10.150.12", + "nsg": "airlock", + "cidr": "10.150.12.0/24" } }, "nsg": { "data": {}, "dbingress": { "name": "NSG_SRE_SANDBOX_DB_INGRESS" + }, + "airlock": { + "name": "NSG_SRE_SANDBOX_AIRLOCK" } } }, @@ -300,11 +309,20 @@ "rg": "RG_SRE_WEBAPPS", "nsg": "NSG_SRE_SANDBOX_WEBAPPS", "gitlab": { 
- "vmName": "GITLAB-SRE-SANDBOX", - "vmSize": "Standard_D2s_v3", - "hostname": "GITLAB-SRE-SANDBOX", - "fqdn": "GITLAB-SRE-SANDBOX.mortest.dsgroupdev.co.uk", - "ip": "10.150.10.151" + "internal": { + "vmName": "GITLAB-INTERNAL-SRE-SANDBOX", + "vmSize": "Standard_D2s_v3", + "hostname": "GITLAB-INTERNAL-SRE-SANDBOX", + "fqdn": "GITLAB-INTERNAL-SRE-SANDBOX.mortest.dsgroupdev.co.uk", + "ip": "10.150.10.151" + }, + "external": { + "vmName": "GITLAB-EXTERNAL-SRE-SANDBOX", + "vmSize": "Standard_D2s_v3", + "hostname": "GITLAB-EXTERNAL-SRE-SANDBOX", + "fqdn": "GITLAB-EXTERNAL-SRE-SANDBOX.mortest.dsgroupdev.co.uk", + "ip": "10.150.12.151" + } }, "hackmd": { "vmName": "HACKMD-SRE-SANDBOX", From 3c0aecd37ba970dd6ea88546c94252dcb8f83682 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Fri, 24 Apr 2020 12:36:41 +0100 Subject: [PATCH 002/155] Add a basic cloud init template for gitlab external Co-authored-by: jemrobinson Co-authored-by: nbarlowATI Co-authored-by: ots22 --- .../cloud-init-gitlab-external.template.yaml | 18 ++++++++++++ ... 
cloud-init-gitlab-internal.template.yaml} | 0 .../setup/Setup_SRE_WebApp_Servers.ps1 | 28 +++++++++++++++---- 3 files changed, 40 insertions(+), 6 deletions(-) create mode 100644 deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml rename deployment/secure_research_environment/cloud_init/{cloud-init-gitlab.template.yaml => cloud-init-gitlab-internal.template.yaml} (100%) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml new file mode 100644 index 0000000000..eb22e2a12e --- /dev/null +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -0,0 +1,18 @@ +#cloud-config +package_update: true +package_upgrade: true + +# Install LDAP tools for debugging LDAP issues +packages: + - git + +runcmd: + # Configure server + - echo "Configuring server" + +# Shutdown so that we can tell when the job has finished by polling the VM state +power_state: + mode: poweroff + message: "Shutting down as a signal that setup is finished" + timeout: 30 + condition: true diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml similarity index 100% rename from deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml rename to deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index cb6d362b26..f096d3396a 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -58,7 +58,7 @@ $shmDcFqdn = 
($config.shm.dc.hostname + "." + $config.shm.domain.fqdn) $gitlabFqdn = $config.sre.webapps.gitlab.internal.hostname + "." + $config.sre.domain.fqdn $gitlabLdapUserDn = "CN=" + $config.sre.users.ldap.gitlab.name + "," + $config.shm.domain.serviceOuPath $gitlabUserFilter = "(&(objectClass=user)(memberOf=CN=" + $config.sre.domain.securityGroups.researchUsers.name + "," + $config.shm.domain.securityOuPath + "))" -$gitlabCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab.template.yaml" | Get-Item | Get-Content -Raw +$gitlabCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab-internal.template.yaml" | Get-Item | Get-Content -Raw $gitlabCloudInit = $gitlabCloudInitTemplate.Replace('', $shmDcFqdn). Replace('', $gitlabLdapUserDn). Replace('',$gitlabLdapPassword). @@ -106,11 +106,15 @@ $vmIpAddress = $config.sre.webapps.gitlab.external.ip $vmNic = Deploy-VirtualMachineNIC -Name "$vmName-NIC" -ResourceGroupName $config.sre.webapps.rg -Subnet $subnet -PrivateIpAddress $vmIpAddress -Location $config.sre.location -# Deploy the VM -# ------------- +# Deploy the GitLab external VM +# ------------------------------ $bootDiagnosticsAccount = Deploy-StorageAccount -Name $config.sre.storage.bootdiagnostics.accountName -ResourceGroupName $config.sre.storage.bootdiagnostics.rg -Location $config.sre.location -$cloudInitYaml = "" +$gitlabExternalCloudInitTemplate = Join-Path $PSScriptRoot ".." 
"cloud_init" "cloud-init-gitlab-external.template.yaml" | Get-Item | Get-Content -Raw +$gitlabExternalCloudInitEncoded = [System.Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($gitlabExternalCloudInitTemplate)) + +Write-Host "CHECKING:" +Write-Host $gitlabExternalCloudInitTemplate $params = @{ Name = $vmName @@ -118,7 +122,7 @@ $params = @{ AdminPassword = $sreAdminPassword AdminUsername = $sreAdminUsername BootDiagnosticsAccount = $bootDiagnosticsAccount - CloudInitYaml = $cloudInitYaml + CloudInitYaml = $gitlabExternalCloudInitTemplate location = $config.sre.location NicId = $vmNic.Id OsDiskType = "Standard_LRS" @@ -127,6 +131,19 @@ $params = @{ } $_ = Deploy-UbuntuVirtualMachine @params +Add-LogMessage -Level Info "Waiting for cloud-init provisioning to finish (this will take 5+ minutes)..." +$progress = 0 +$gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.external.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code +while (-Not ($gitlabStatuses.Contains("ProvisioningState/succeeded") -and $gitlabStatuses.Contains("PowerState/stopped"))) { + $progress = [math]::min(100, $progress + 1) + $gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.external.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code + Write-Progress -Activity "Deployment status:" -Status "GitLab External [$($gitlabStatuses[0]) $($gitlabStatuses[1])]" -PercentComplete $progress + Start-Sleep 10 +} + +Add-LogMessage -Level Info "Rebooting the $name VM: '$vmName'" +Enable-AzVM -Name $vmName -ResourceGroupName $config.sre.webapps.rg + # Deploy GitLab/HackMD VMs from template # -------------------------------------- @@ -189,7 +206,6 @@ foreach ($nameVMNameParamsPair in (("HackMD", $config.sre.webapps.hackmd.vmName) } } - # Switch back to original subscription # ------------------------------------ $_ = Set-AzContext -Context $originalContext; From 6056af122d1adcfdce2af49a8595dd73340154b6 Mon Sep 17 00:00:00 2001 From: Jack 
Roberts Date: Fri, 24 Apr 2020 12:55:01 +0100 Subject: [PATCH 003/155] Remove debugging print statement and unused variable --- .../setup/Setup_SRE_WebApp_Servers.ps1 | 4 ---- 1 file changed, 4 deletions(-) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index f096d3396a..64b98f0af7 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -111,10 +111,6 @@ $vmNic = Deploy-VirtualMachineNIC -Name "$vmName-NIC" -ResourceGroupName $config $bootDiagnosticsAccount = Deploy-StorageAccount -Name $config.sre.storage.bootdiagnostics.accountName -ResourceGroupName $config.sre.storage.bootdiagnostics.rg -Location $config.sre.location $gitlabExternalCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab-external.template.yaml" | Get-Item | Get-Content -Raw -$gitlabExternalCloudInitEncoded = [System.Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($gitlabExternalCloudInitTemplate)) - -Write-Host "CHECKING:" -Write-Host $gitlabExternalCloudInitTemplate $params = @{ Name = $vmName From 8b8d20c0eff27000366269537710a7e2709986de Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Mon, 27 Apr 2020 09:22:08 +0100 Subject: [PATCH 004/155] add network security group rule (InboundDenyAll) for nsgGitlabExternal --- .../setup/Setup_SRE_WebApp_Servers.ps1 | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 64b98f0af7..24606c546e 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -40,7 +40,15 @@ Add-NetworkSecurityGroupRule -NetworkSecurityGroup 
$nsgGitlabInternal ` -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` -DestinationAddressPrefix Internet -DestinationPortRange * + $nsgGitlabExternal = Deploy-NetworkSecurityGroup -Name $config.sre.network.nsg.airlock.name -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location +Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgGitlabExternal ` + -Name "InboundDenyAll" ` + -Description "Inbound deny everything" ` + -Priority 4000 ` + -Direction Inbound -Access Deny -Protocol * ` + -SourceAddressPrefix * -SourcePortRange * ` + -DestinationAddressPrefix * -DestinationPortRange * # Check that VNET and subnet exist From 2ab0fce9d4ea30552983e03246c72a5ee3658a82 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Mon, 27 Apr 2020 21:26:22 +0100 Subject: [PATCH 005/155] Add a user with an API key to gitlab internal Adds a user on gitlab internal that can be used to ingress repos from gitlab external: * Config and setup changed to add secrets for username, password and API token (stored in keyvault). * gitlab-rails commands in cloud init to add the user and generate the token. By default the username is "external" with email "external@", e.g. external@cw20.dsgroupdev.co.uk. The email isn't valid/used but the gitlab server will only accept new users from the given domain. 
--- deployment/common/Configuration.psm1 | 3 + .../cloud-init-gitlab-internal.template.yaml | 6 ++ .../setup/Setup_SRE_WebApp_Servers.ps1 | 93 ++++++++++--------- .../full/sre_mortestsandbox_full_config.json | 3 + 4 files changed, 61 insertions(+), 44 deletions(-) diff --git a/deployment/common/Configuration.psm1 b/deployment/common/Configuration.psm1 index a2958f8333..93173ef45b 100644 --- a/deployment/common/Configuration.psm1 +++ b/deployment/common/Configuration.psm1 @@ -393,6 +393,9 @@ function Add-SreConfig { gitlabLdapPassword = "$($config.sre.shortName)-gitlab-ldap-password" gitlabRootPassword = "$($config.sre.shortName)-gitlab-root-password" gitlabUserPassword = "$($config.sre.shortName)-gitlab-user-password" + gitlabExternalUsername = "$($config.sre.shortName)-gitlab-external-username" + gitlabExternalPassword = "$($config.sre.shortName)-gitlab-external-password" + gitlabExternalAPIToken = "$($config.sre.shortName)-gitlab-external-api-token" hackmdLdapPassword = "$($config.sre.shortName)-hackmd-ldap-password" hackmdUserPassword = "$($config.sre.shortName)-hackmd-user-password" letsEncryptCertificate = "$($config.sre.shortName)-lets-encrypt-certificate" diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml index 3781938e52..789f868345 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml @@ -83,6 +83,12 @@ runcmd: # Restrict login to SHM domain (must be done AFTER GitLab update) - | gitlab-rails runner "ApplicationSetting.last.update_attributes(domain_whitelist: [''])" + # Create user for ingressing external git repos + - | + echo "user = User.create(:username => '', :password => '', :password_confirmation => '', :email =>'@', :skip_confirmation => true, :name => 
'');user.save!;exit;" | gitlab-rails console -e production + # Create a API token for the ingress user created above + - | + echo "user = User.find_by(username: '');user.personal_access_tokens.create(name: 'apitoken', token_digest: Gitlab::CryptoHelper.sha256(''), impersonation: false, scopes: [:api]);exit;" | gitlab-rails console -e production # Reload GitLab configuration and restart GitLab - gitlab-ctl reconfigure - gitlab-ctl restart diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 24606c546e..c99091396e 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -25,6 +25,9 @@ $sreAdminPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name $gitlabRootPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabRootPassword $gitlabUserPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabUserPassword $gitlabLdapPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabLdapPassword +$gitlabExternalUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabExternalUsername -DefaultValue "external" +$gitlabExternalPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabExternalPassword +$gitlabExternalAPIToken = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabExternalAPIToken $hackmdUserPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.hackmdUserPassword $hackmdLdapPassword = 
Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.hackmdLdapPassword @@ -76,7 +79,11 @@ $gitlabCloudInit = $gitlabCloudInitTemplate.Replace('', $shmDcFq Replace('',$config.sre.webapps.gitlab.internal.hostname). Replace('',$gitlabFqdn). Replace('',$gitlabRootPassword). - Replace('',$config.shm.domain.fqdn) + Replace('',$config.shm.domain.fqdn). + Replace('',$gitlabExternalUsername). + Replace('',$gitlabExternalPassword). + Replace('',$gitlabExternalAPIToken) + # Encode as base64 $gitlabCloudInitEncoded = [System.Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($gitlabCloudInit)) @@ -106,49 +113,6 @@ $hackmdCloudInitEncoded = [System.Convert]::ToBase64String([System.Text.Encoding $_ = Deploy-ResourceGroup -Name $config.sre.webapps.rg -Location $config.sre.location -# Deploy NIC and data disks for gitlab.external -# --------------------------------------------- - -$vmName = $config.sre.webapps.gitlab.external.vmName -$vmIpAddress = $config.sre.webapps.gitlab.external.ip -$vmNic = Deploy-VirtualMachineNIC -Name "$vmName-NIC" -ResourceGroupName $config.sre.webapps.rg -Subnet $subnet -PrivateIpAddress $vmIpAddress -Location $config.sre.location - - -# Deploy the GitLab external VM -# ------------------------------ - -$bootDiagnosticsAccount = Deploy-StorageAccount -Name $config.sre.storage.bootdiagnostics.accountName -ResourceGroupName $config.sre.storage.bootdiagnostics.rg -Location $config.sre.location -$gitlabExternalCloudInitTemplate = Join-Path $PSScriptRoot ".." 
"cloud_init" "cloud-init-gitlab-external.template.yaml" | Get-Item | Get-Content -Raw - -$params = @{ - Name = $vmName - Size = $config.sre.webapps.gitlab.external.vmSize - AdminPassword = $sreAdminPassword - AdminUsername = $sreAdminUsername - BootDiagnosticsAccount = $bootDiagnosticsAccount - CloudInitYaml = $gitlabExternalCloudInitTemplate - location = $config.sre.location - NicId = $vmNic.Id - OsDiskType = "Standard_LRS" - ResourceGroupName = $config.sre.webapps.rg - ImageSku = "18.04-LTS" -} -$_ = Deploy-UbuntuVirtualMachine @params - -Add-LogMessage -Level Info "Waiting for cloud-init provisioning to finish (this will take 5+ minutes)..." -$progress = 0 -$gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.external.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code -while (-Not ($gitlabStatuses.Contains("ProvisioningState/succeeded") -and $gitlabStatuses.Contains("PowerState/stopped"))) { - $progress = [math]::min(100, $progress + 1) - $gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.external.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code - Write-Progress -Activity "Deployment status:" -Status "GitLab External [$($gitlabStatuses[0]) $($gitlabStatuses[1])]" -PercentComplete $progress - Start-Sleep 10 -} - -Add-LogMessage -Level Info "Rebooting the $name VM: '$vmName'" -Enable-AzVM -Name $vmName -ResourceGroupName $config.sre.webapps.rg - - # Deploy GitLab/HackMD VMs from template # -------------------------------------- Add-LogMessage -Level Info "Deploying GitLab/HackMD VMs from template..." 
@@ -210,6 +174,47 @@ foreach ($nameVMNameParamsPair in (("HackMD", $config.sre.webapps.hackmd.vmName) } } +# Deploy NIC and data disks for gitlab.external +# --------------------------------------------- +$vmName = $config.sre.webapps.gitlab.external.vmName +$vmIpAddress = $config.sre.webapps.gitlab.external.ip +$vmNic = Deploy-VirtualMachineNIC -Name "$vmName-NIC" -ResourceGroupName $config.sre.webapps.rg -Subnet $subnet -PrivateIpAddress $vmIpAddress -Location $config.sre.location + + +# Deploy the GitLab external VM +# ------------------------------ +$bootDiagnosticsAccount = Deploy-StorageAccount -Name $config.sre.storage.bootdiagnostics.accountName -ResourceGroupName $config.sre.storage.bootdiagnostics.rg -Location $config.sre.location +$gitlabExternalCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab-external.template.yaml" | Get-Item | Get-Content -Raw + +$params = @{ + Name = $vmName + Size = $config.sre.webapps.gitlab.external.vmSize + AdminPassword = $sreAdminPassword + AdminUsername = $sreAdminUsername + BootDiagnosticsAccount = $bootDiagnosticsAccount + CloudInitYaml = $gitlabExternalCloudInitTemplate + location = $config.sre.location + NicId = $vmNic.Id + OsDiskType = "Standard_LRS" + ResourceGroupName = $config.sre.webapps.rg + ImageSku = "18.04-LTS" +} +$_ = Deploy-UbuntuVirtualMachine @params + +Add-LogMessage -Level Info "Waiting for cloud-init provisioning to finish (this will take 5+ minutes)..." 
+$progress = 0 +$gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.external.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code +while (-Not ($gitlabStatuses.Contains("ProvisioningState/succeeded") -and $gitlabStatuses.Contains("PowerState/stopped"))) { + $progress = [math]::min(100, $progress + 1) + $gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.external.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code + Write-Progress -Activity "Deployment status:" -Status "GitLab External [$($gitlabStatuses[0]) $($gitlabStatuses[1])]" -PercentComplete $progress + Start-Sleep 10 +} + +Add-LogMessage -Level Info "Rebooting the $name VM: '$vmName'" +Enable-AzVM -Name $vmName -ResourceGroupName $config.sre.webapps.rg + + # Switch back to original subscription # ------------------------------------ $_ = Set-AzContext -Context $originalContext; diff --git a/environment_configs/full/sre_mortestsandbox_full_config.json b/environment_configs/full/sre_mortestsandbox_full_config.json index 96fddead66..2ecf56a4a0 100644 --- a/environment_configs/full/sre_mortestsandbox_full_config.json +++ b/environment_configs/full/sre_mortestsandbox_full_config.json @@ -225,6 +225,9 @@ "gitlabLdapPassword": "sre-sandbox-gitlab-ldap-password", "gitlabRootPassword": "sre-sandbox-gitlab-root-password", "gitlabUserPassword": "sre-sandbox-gitlab-user-password", + "gitlabExternalUsername": "sre-sandbox-gitlab-external-username", + "gitlabExternalPassword": "sre-sandbox-gitlab-external-password", + "gitlabExternalAPIToken": "sre-sandbox-gitlab-external-api-token", "hackmdLdapPassword": "sre-sandbox-hackmd-ldap-password", "hackmdUserPassword": "sre-sandbox-hackmd-user-password", "letsEncryptCertificate": "sre-sandbox-lets-encrypt-certificate", From 424bbd3af5051e611c6ce72510507055c2d7cabf Mon Sep 17 00:00:00 2001 From: James Robinson Date: Tue, 28 Apr 2020 11:02:06 +0100 Subject: [PATCH 006/155] Add a new session server for holding connections to 
review boxes --- deployment/common/Configuration.psm1 | 5 + .../sre-data-server-template.json | 554 +++---- .../arm_templates/sre-rds-template.json | 1411 +++++++++-------- .../arm_templates/sre-webapps-template.json | 596 +++---- .../scripts/Move_RDS_VMs_Into_OUs.ps1 | 44 +- .../Deploy_RDS_Environment.template.ps1 | 11 +- .../setup/Setup_SRE_VNET_RDS.ps1 | 25 +- 7 files changed, 1426 insertions(+), 1220 deletions(-) diff --git a/deployment/common/Configuration.psm1 b/deployment/common/Configuration.psm1 index 93173ef45b..2e925977da 100644 --- a/deployment/common/Configuration.psm1 +++ b/deployment/common/Configuration.psm1 @@ -455,6 +455,11 @@ function Add-SreConfig { vmSize = "Standard_DS2_v2" nsg = "NSG_SRE_$($config.sre.id)_RDS_SESSION_HOSTS".ToUpper() } + sessionHost3 = [ordered]@{ + vmName = "REV-SRE-$($config.sre.id)".ToUpper() | TrimToLength 15 + vmSize = "Standard_DS2_v2" + nsg = "NSG_SRE_$($config.sre.id)_RDS_SESSION_HOSTS".ToUpper() + } } # Set which IPs can access the Safe Haven: if 'default' is given then apply sensible defaults diff --git a/deployment/secure_research_environment/arm_templates/sre-data-server-template.json b/deployment/secure_research_environment/arm_templates/sre-data-server-template.json index d7b35c8ada..8c31528f12 100644 --- a/deployment/secure_research_environment/arm_templates/sre-data-server-template.json +++ b/deployment/secure_research_environment/arm_templates/sre-data-server-template.json @@ -1,278 +1,278 @@ -{ - "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "parameters": { - "Administrator_Password": { - "type": "securestring", - "metadata": { - "description": "Enter name for VM Administrator Password" - } - }, - "Administrator_User": { - "type": "string", - "metadata": { - "description": "Enter name for VM Administrator" - } - }, - "BootDiagnostics_Account_Name": { - "type": "string", - "metadata": { - "description": "Enter name of storage 
account used for boot diagnostics" - } - }, - "Data_Server_Name": { - "type": "string", - "metadata": { - "description": "Enter name for data server VM" - } - }, - "DC_Administrator_Password": { - "type": "securestring", - "metadata": { - "description": "Enter name for DC Administrator Password" - } - }, - "DC_Administrator_User": { - "type": "string", - "metadata": { - "description": "Enter name for DC Administrator" - } - }, - "Disk_Size_Egress_GB": { - "type": "string", - "metadata": { - "description": "Egress disk size in GB" - } - }, - "Disk_Size_Ingress_GB": { - "type": "string", - "metadata": { - "description": "Ingress disk size in GB" - } - }, - "Disk_Size_Shared_GB": { - "type": "string", - "metadata": { - "description": "Shared disk size in GB" - } - }, - "Domain_Name": { - "type": "string", - "metadata": { - "description": "Enter Domain Name" - } - }, - "IP_Address": { - "type": "string", - "defaultValue": "10.250.x.100", - "metadata": { - "description": "Enter IP_Address for VM, must end in 100" - } - }, - "Virtual_Network_Name": { - "type": "string", - "metadata": { - "description": "Enter name of virtual network to provision this VM" - } - }, - "Virtual_Network_Resource_Group": { - "type": "string", - "metadata": { - "description": "Enter name of resource group that is assoicated with the virtual network above" - } - }, - "Virtual_Network_Subnet": { - "type": "string", - "metadata": { - "description": "Enter name of subnet where you want to provision this VM" - } - }, - "VM_Size": { - "type": "string", - "defaultValue": "Standard_B2ms", - "allowedValues": [ - "Standard_B2ms", - "Standard_B4ms", - "Standard_F4s_v2", - "Standard_DS2_v2", - "Standard_D2s_v3" - ], - "metadata": { - "description": "Select size of VM" - } - } - }, - "variables": { - "nic": "[concat(parameters('Data_Server_Name'), '-', 'NIC')]", - "vnetID": "[resourceId(parameters('Virtual_Network_Resource_Group'), 'Microsoft.Network/virtualNetworks', parameters('Virtual_Network_Name'))]", 
- "subnet": "[concat(variables('vnetID'),'/subnets/', parameters('Virtual_Network_Subnet'))]", - "ingressdatadisk": "[concat(parameters('Data_Server_Name'), '-INGRESS-DATA-DISK')]", - "shareddatadisk": "[concat(parameters('Data_Server_Name'), '-SHARED-DATA-DISK')]", - "egressdatadisk": "[concat(parameters('Data_Server_Name'), '-EGRESS-DATA-DISK')]" - }, - "resources": [{ - "type": "Microsoft.Compute/virtualMachines", - "name": "[parameters('Data_Server_Name')]", - "apiVersion": "2018-06-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "hardwareProfile": { - "vmSize": "[parameters('VM_Size')]" - }, - "storageProfile": { - "imageReference": { - "publisher": "MicrosoftWindowsServer", - "offer": "WindowsServer", - "sku": "2019-Datacenter", - "version": "latest" - }, - "osDisk": { - "osType": "Windows", - "name": "[concat(parameters('Data_Server_Name'),'-OS-DISK')]", - "createOption": "FromImage", - "caching": "ReadWrite", - "writeAcceleratorEnabled": false, - "managedDisk": { - "storageAccountType": "Standard_LRS" - }, - "diskSizeGB": 128 - }, - "dataDisks": [ - { - "lun": 0, - "name": "[variables('ingressdatadisk')]", - "createOption": "Empty", - "caching": "None", - "writeAcceleratorEnabled": false, - "managedDisk": { - "storageAccountType": "Standard_LRS" - }, - "diskSizeGB": "[parameters('Disk_Size_Ingress_GB')]" - }, - { - "lun": 1, - "name": "[variables('shareddatadisk')]", - "createOption": "Empty", - "caching": "None", - "writeAcceleratorEnabled": false, - "managedDisk": { - "storageAccountType": "Standard_LRS" - }, - "diskSizeGB": "[parameters('Disk_Size_Shared_GB')]" - }, - { - "lun": 2, - "name": "[variables('egressdatadisk')]", - "createOption": "Empty", - "caching": "None", - "writeAcceleratorEnabled": false, - "managedDisk": { - "storageAccountType": "Standard_LRS" - }, - "diskSizeGB": "[parameters('Disk_Size_Egress_GB')]" - } - ] - }, - "osProfile": { - "computerName": "[parameters('Data_Server_Name')]", - 
"adminUsername": "[parameters('Administrator_User')]", - "adminPassword": "[parameters('Administrator_Password')]", - "windowsConfiguration": { - "provisionVMAgent": true, - "enableAutomaticUpdates": true - }, - "secrets": [] - }, - "networkProfile": { - "networkInterfaces": [{ - "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('nic'))]", - "properties": { - "primary": true - } - }] - }, - "diagnosticsProfile": { - "bootDiagnostics": { - "enabled": true, - "storageUri": "[concat('https', '://', parameters('BootDiagnostics_Account_Name'), '.blob.core.windows.net', '/')]" - } - } - }, - "dependsOn": [ - "[resourceId('Microsoft.Network/networkInterfaces', variables('nic'))]" - ] - }, - { - "type": "Microsoft.Network/networkInterfaces", - "name": "[variables('nic')]", - "apiVersion": "2018-10-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "ipConfigurations": [{ - "name": "ipconfig1", - "properties": { - "privateIPAddress": "[parameters('IP_Address')]", - "privateIPAllocationMethod": "Static", - "subnet": { - "id": "[variables('subnet')]" - }, - "primary": true, - "privateIPAddressVersion": "IPv4" - } - }], - "dnsSettings": { - "dnsServers": [], - "appliedDnsServers": [] - }, - "enableAcceleratedNetworking": false, - "enableIPForwarding": false, - "primary": true, - "tapConfigurations": [] - }, - "dependsOn": [] - }, - { - "type": "Microsoft.Compute/virtualMachines/extensions", - "name": "[concat(parameters('Data_Server_Name'), '/', 'bginfo')]", - "apiVersion": "2018-06-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "autoUpgradeMinorVersion": true, - "publisher": "Microsoft.Compute", - "type": "bginfo", - "typeHandlerVersion": "2.1" - }, - "dependsOn": [ - "[resourceId('Microsoft.Compute/virtualMachines', parameters('Data_Server_Name'))]" - ] - }, - { - "apiVersion": "2018-06-01", - "type": "Microsoft.Compute/virtualMachines/extensions", - "name": 
"[concat(parameters('Data_Server_Name'),'/joindomain')]", - "location": "[resourceGroup().location]", - "dependsOn": [ - "[resourceId('Microsoft.Compute/virtualMachines', parameters('Data_Server_Name'))]", - "[resourceId('Microsoft.Compute/virtualMachines/extensions', parameters('Data_Server_Name'),'bginfo')]" - ], - "properties": { - "publisher": "Microsoft.Compute", - "type": "JsonADDomainExtension", - "typeHandlerVersion": "1.3", - "autoUpgradeMinorVersion": true, - "settings": { - "Name": "[parameters('Domain_Name')]", - "User": "[concat(parameters('Domain_Name'), '\\', parameters('DC_Administrator_User'))]", - "Restart": "true", - "Options": "3" - }, - "protectedSettings": { - "Password": "[parameters('DC_Administrator_Password')]" - } - } - } - ] +{ + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "Administrator_Password": { + "type": "securestring", + "metadata": { + "description": "Enter name for VM Administrator Password" + } + }, + "Administrator_User": { + "type": "string", + "metadata": { + "description": "Enter name for VM Administrator" + } + }, + "BootDiagnostics_Account_Name": { + "type": "string", + "metadata": { + "description": "Enter name of storage account used for boot diagnostics" + } + }, + "Data_Server_Name": { + "type": "string", + "metadata": { + "description": "Enter name for data server VM" + } + }, + "DC_Administrator_Password": { + "type": "securestring", + "metadata": { + "description": "Enter name for DC Administrator Password" + } + }, + "DC_Administrator_User": { + "type": "string", + "metadata": { + "description": "Enter name for DC Administrator" + } + }, + "Disk_Size_Egress_GB": { + "type": "string", + "metadata": { + "description": "Egress disk size in GB" + } + }, + "Disk_Size_Ingress_GB": { + "type": "string", + "metadata": { + "description": "Ingress disk size in GB" + } + }, + "Disk_Size_Shared_GB": { + "type": "string", + 
"metadata": { + "description": "Shared disk size in GB" + } + }, + "Domain_Name": { + "type": "string", + "metadata": { + "description": "Enter Domain Name" + } + }, + "IP_Address": { + "type": "string", + "defaultValue": "10.250.x.100", + "metadata": { + "description": "Enter IP_Address for VM, must end in 100" + } + }, + "Virtual_Network_Name": { + "type": "string", + "metadata": { + "description": "Enter name of virtual network to provision this VM" + } + }, + "Virtual_Network_Resource_Group": { + "type": "string", + "metadata": { + "description": "Enter name of resource group that is assoicated with the virtual network above" + } + }, + "Virtual_Network_Subnet": { + "type": "string", + "metadata": { + "description": "Enter name of subnet where you want to provision this VM" + } + }, + "VM_Size": { + "type": "string", + "defaultValue": "Standard_B2ms", + "allowedValues": [ + "Standard_B2ms", + "Standard_B4ms", + "Standard_F4s_v2", + "Standard_DS2_v2", + "Standard_D2s_v3" + ], + "metadata": { + "description": "Select size of VM" + } + } + }, + "variables": { + "nic": "[concat(parameters('Data_Server_Name'), '-', 'NIC')]", + "vnetID": "[resourceId(parameters('Virtual_Network_Resource_Group'), 'Microsoft.Network/virtualNetworks', parameters('Virtual_Network_Name'))]", + "subnet": "[concat(variables('vnetID'),'/subnets/', parameters('Virtual_Network_Subnet'))]", + "ingressdatadisk": "[concat(parameters('Data_Server_Name'), '-INGRESS-DATA-DISK')]", + "shareddatadisk": "[concat(parameters('Data_Server_Name'), '-SHARED-DATA-DISK')]", + "egressdatadisk": "[concat(parameters('Data_Server_Name'), '-EGRESS-DATA-DISK')]" + }, + "resources": [{ + "type": "Microsoft.Compute/virtualMachines", + "name": "[parameters('Data_Server_Name')]", + "apiVersion": "2018-06-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "hardwareProfile": { + "vmSize": "[parameters('VM_Size')]" + }, + "storageProfile": { + "imageReference": { + "publisher": 
"MicrosoftWindowsServer", + "offer": "WindowsServer", + "sku": "2019-Datacenter", + "version": "latest" + }, + "osDisk": { + "osType": "Windows", + "name": "[concat(parameters('Data_Server_Name'),'-OS-DISK')]", + "createOption": "FromImage", + "caching": "ReadWrite", + "writeAcceleratorEnabled": false, + "managedDisk": { + "storageAccountType": "Standard_LRS" + }, + "diskSizeGB": 128 + }, + "dataDisks": [ + { + "lun": 0, + "name": "[variables('ingressdatadisk')]", + "createOption": "Empty", + "caching": "None", + "writeAcceleratorEnabled": false, + "managedDisk": { + "storageAccountType": "Standard_LRS" + }, + "diskSizeGB": "[parameters('Disk_Size_Ingress_GB')]" + }, + { + "lun": 1, + "name": "[variables('shareddatadisk')]", + "createOption": "Empty", + "caching": "None", + "writeAcceleratorEnabled": false, + "managedDisk": { + "storageAccountType": "Standard_LRS" + }, + "diskSizeGB": "[parameters('Disk_Size_Shared_GB')]" + }, + { + "lun": 2, + "name": "[variables('egressdatadisk')]", + "createOption": "Empty", + "caching": "None", + "writeAcceleratorEnabled": false, + "managedDisk": { + "storageAccountType": "Standard_LRS" + }, + "diskSizeGB": "[parameters('Disk_Size_Egress_GB')]" + } + ] + }, + "osProfile": { + "computerName": "[parameters('Data_Server_Name')]", + "adminUsername": "[parameters('Administrator_User')]", + "adminPassword": "[parameters('Administrator_Password')]", + "windowsConfiguration": { + "provisionVMAgent": true, + "enableAutomaticUpdates": true + }, + "secrets": [] + }, + "networkProfile": { + "networkInterfaces": [{ + "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('nic'))]", + "properties": { + "primary": true + } + }] + }, + "diagnosticsProfile": { + "bootDiagnostics": { + "enabled": true, + "storageUri": "[concat('https', '://', parameters('BootDiagnostics_Account_Name'), '.blob.core.windows.net', '/')]" + } + } + }, + "dependsOn": [ + "[resourceId('Microsoft.Network/networkInterfaces', variables('nic'))]" + ] + }, + { 
+ "type": "Microsoft.Network/networkInterfaces", + "name": "[variables('nic')]", + "apiVersion": "2018-10-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "ipConfigurations": [{ + "name": "ipconfig1", + "properties": { + "privateIPAddress": "[parameters('IP_Address')]", + "privateIPAllocationMethod": "Static", + "subnet": { + "id": "[variables('subnet')]" + }, + "primary": true, + "privateIPAddressVersion": "IPv4" + } + }], + "dnsSettings": { + "dnsServers": [], + "appliedDnsServers": [] + }, + "enableAcceleratedNetworking": false, + "enableIPForwarding": false, + "primary": true, + "tapConfigurations": [] + }, + "dependsOn": [] + }, + { + "type": "Microsoft.Compute/virtualMachines/extensions", + "name": "[concat(parameters('Data_Server_Name'), '/', 'bginfo')]", + "apiVersion": "2018-06-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "autoUpgradeMinorVersion": true, + "publisher": "Microsoft.Compute", + "type": "bginfo", + "typeHandlerVersion": "2.1" + }, + "dependsOn": [ + "[resourceId('Microsoft.Compute/virtualMachines', parameters('Data_Server_Name'))]" + ] + }, + { + "apiVersion": "2018-06-01", + "type": "Microsoft.Compute/virtualMachines/extensions", + "name": "[concat(parameters('Data_Server_Name'),'/joindomain')]", + "location": "[resourceGroup().location]", + "dependsOn": [ + "[resourceId('Microsoft.Compute/virtualMachines', parameters('Data_Server_Name'))]", + "[resourceId('Microsoft.Compute/virtualMachines/extensions', parameters('Data_Server_Name'),'bginfo')]" + ], + "properties": { + "publisher": "Microsoft.Compute", + "type": "JsonADDomainExtension", + "typeHandlerVersion": "1.3", + "autoUpgradeMinorVersion": true, + "settings": { + "Name": "[parameters('Domain_Name')]", + "User": "[concat(parameters('Domain_Name'), '\\', parameters('DC_Administrator_User'))]", + "Restart": "true", + "Options": "3" + }, + "protectedSettings": { + "Password": 
"[parameters('DC_Administrator_Password')]" + } + } + } + ] } \ No newline at end of file diff --git a/deployment/secure_research_environment/arm_templates/sre-rds-template.json b/deployment/secure_research_environment/arm_templates/sre-rds-template.json index aded25a915..8f6f54b2d3 100644 --- a/deployment/secure_research_environment/arm_templates/sre-rds-template.json +++ b/deployment/secure_research_environment/arm_templates/sre-rds-template.json @@ -1,619 +1,794 @@ -{ - "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "parameters": { - "Administrator_Password": { - "type": "securestring", - "metadata": { - "description": "Enter name for VM Administrator_Password" - } - }, - "Administrator_User": { - "type": "string", - "metadata": { - "description": "Enter name for VM Administrator" - } - }, - "BootDiagnostics_Account_Name": { - "type": "string", - "metadata": { - "description": "Enter name of storage account used for boot diagnostics" - } - }, - "DC_Administrator_Password": { - "type": "securestring", - "metadata": { - "description": "Enter name for DC Administrator Password" - } - }, - "DC_Administrator_User": { - "type": "string", - "metadata": { - "description": "Enter name for DC Administrator" - } - }, - "Domain_Name": { - "type": "string", - "metadata": { - "description": "Enter Domain Name" - } - }, - "NSG_Gateway_Name": { - "type": "string", - "metadata": { - "description": "Enter NSG Gateway Name" - } - }, - "RDS_Gateway_IP_Address": { - "type": "string", - "defaultValue": "10.250.x.250", - "metadata": { - "description": "Enter IP address for RDS Gateway VM, must end in 250" - } - }, - "RDS_Gateway_Name": { - "type": "string", - "metadata": { - "description": "Name of the RDS gateway VM" - } - }, - "RDS_Gateway_VM_Size": { - "type": "string", - "defaultValue": "Standard_B2ms", - "allowedValues": [ - "Standard_B2ms", - "Standard_B8ms", - "Standard_D4s_v3", - "Standard_D32_v3", 
- "Standard_DS2_v2", - "Standard_DS4_v2", - "Standard_DS14_v2", - "Standard_F8s_v2", - "Standard_F16s_v2", - "Standard_F32s_v2" - ], - "metadata": { - "description": "Select size of RDS Gateway VM" - } - }, - "RDS_Session_Host_Apps_IP_Address": { - "type": "string", - "defaultValue": "10.250.x.249", - "metadata": { - "description": "Enter IP address for RDS_Session_Host_Apps VM, must end in 249" - } - }, - "RDS_Session_Host_Apps_Name": { - "type": "string", - "metadata": { - "description": "Name of the RDS apps session host VM" - } - }, - "RDS_Session_Host_Apps_VM_Size": { - "type": "string", - "defaultValue": "Standard_B2ms", - "allowedValues": [ - "Standard_B2ms", - "Standard_B8ms", - "Standard_D4s_v3", - "Standard_D32_v3", - "Standard_DS2_v2", - "Standard_DS4_v2", - "Standard_DS14_v2", - "Standard_F8s_v2", - "Standard_F16s_v2", - "Standard_F32s_v2" - ], - "metadata": { - "description": "Select size of RDS_Session_Host_Apps VM" - } - }, - "RDS_Session_Host_Desktop_IP_Address": { - "type": "string", - "defaultValue": "10.250.x.248", - "metadata": { - "description": "Enter IP address for RDS_Session_Host_Apps VM, must end in 248" - } - }, - "RDS_Session_Host_Desktop_Name": { - "type": "string", - "metadata": { - "description": "Name of the RDS desktop session host VM" - } - }, - "RDS_Session_Host_Desktop_VM_Size": { - "type": "string", - "defaultValue": "Standard_B2ms", - "allowedValues": [ - "Standard_B2ms", - "Standard_B8ms", - "Standard_D4s_v3", - "Standard_D32_v3", - "Standard_DS2_v2", - "Standard_DS4_v2", - "Standard_DS14_v2", - "Standard_F8s_v2", - "Standard_F16s_v2", - "Standard_F32s_v2" - ], - "metadata": { - "description": "Select size of RDS_Session_Host_Apps VM" - } - }, - "SRE_ID": { - "type": "string", - "metadata": { - "description": "Enter ID for SRE e.g. 
'testsandbox'" - } - }, - "Virtual_Network_Name": { - "type": "string", - "metadata": { - "description": "Enter name of virtual network to provision this VM" - } - }, - "Virtual_Network_Resource_Group": { - "type": "string", - "metadata": { - "description": "Enter name of resource group that is assoicated with the virtual network above" - } - }, - "Virtual_Network_Subnet": { - "type": "string", - "defaultValue": "Subnet-RDS", - "metadata": { - "description": "Enter name of subnet where you want to provision this VM" - } - } - }, - "variables": { - "rdsnic": "[concat(parameters('RDS_Gateway_Name'),'-','NIC')]", - "rdssh1nic": "[concat(parameters('RDS_Session_Host_Apps_Name'),'-','NIC')]", - "rdssh2nic": "[concat(parameters('RDS_Session_Host_Desktop_Name'),'-','NIC')]", - "rdspip": "[concat(parameters('RDS_Gateway_Name'),'-','PIP')]", - "vnetID": "[resourceId(parameters('Virtual_Network_Resource_Group'), 'Microsoft.Network/virtualNetworks', parameters('Virtual_Network_Name'))]", - "subnet": "[concat(variables('vnetID'),'/subnets/', parameters('Virtual_Network_Subnet'))]" - }, - "resources": [{ - "type": "Microsoft.Compute/virtualMachines", - "name": "[parameters('RDS_Gateway_Name')]", - "apiVersion": "2018-06-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "hardwareProfile": { - "vmSize": "[parameters('RDS_Gateway_VM_Size')]" - }, - "storageProfile": { - "imageReference": { - "publisher": "MicrosoftWindowsServer", - "offer": "WindowsServer", - "sku": "2019-Datacenter", - "version": "latest" - }, - "osDisk": { - "osType": "Windows", - "name": "[concat(parameters('RDS_Gateway_Name'),'-OS-DISK')]", - "createOption": "FromImage", - "caching": "ReadWrite", - "writeAcceleratorEnabled": false, - "managedDisk": { - "storageAccountType": "Standard_LRS" - }, - "diskSizeGB": 128 - }, - "dataDisks": [{ - "lun": 0, - "name": "[concat(parameters('RDS_Gateway_Name'),'-DATA-DISK-1')]", - "createOption": "Empty", - "caching": "None", - 
"writeAcceleratorEnabled": false, - "managedDisk": { - "storageAccountType": "Standard_LRS" - }, - "diskSizeGB": 1023 - }, - { - "lun": 1, - "name": "[concat(parameters('RDS_Gateway_Name'),'-DATA-DISK-2')]", - "createOption": "Empty", - "caching": "None", - "writeAcceleratorEnabled": false, - "managedDisk": { - "storageAccountType": "Standard_LRS" - }, - "diskSizeGB": 1023 - } - ] - }, - "osProfile": { - "computerName": "[parameters('RDS_Gateway_Name')]", - "adminUsername": "[parameters('Administrator_User')]", - "adminPassword": "[parameters('Administrator_Password')]", - "windowsConfiguration": { - "provisionVMAgent": true, - "enableAutomaticUpdates": true - }, - "secrets": [], - "allowExtensionOperations": true - }, - "networkProfile": { - "networkInterfaces": [{ - "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('rdsnic'))]", - "properties": { - "primary": true - } - }] - }, - "diagnosticsProfile": { - "bootDiagnostics": { - "enabled": true, - "storageUri": "[concat('https://', parameters('BootDiagnostics_Account_Name'), '.blob.core.windows.net/')]" - } - } - }, - "dependsOn": [ - "[resourceId('Microsoft.Network/networkInterfaces', variables('rdsnic'))]" - ] - }, - { - "type": "Microsoft.Compute/virtualMachines", - "name": "[parameters('RDS_Session_Host_Apps_Name')]", - "apiVersion": "2018-06-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "hardwareProfile": { - "vmSize": "[parameters('RDS_Session_Host_Apps_VM_Size')]" - }, - "storageProfile": { - "imageReference": { - "publisher": "MicrosoftWindowsServer", - "offer": "WindowsServer", - "sku": "2019-Datacenter", - "version": "latest" - }, - "osDisk": { - "osType": "Windows", - "name": "[concat(parameters('RDS_Session_Host_Apps_Name'),'-OS-DISK')]", - "createOption": "FromImage", - "caching": "ReadWrite", - "writeAcceleratorEnabled": false, - "managedDisk": { - "storageAccountType": "Standard_LRS" - }, - "diskSizeGB": 128 - }, - "dataDisks": [] - }, - 
"osProfile": { - "computerName": "[parameters('RDS_Session_Host_Apps_Name')]", - "adminUsername": "[parameters('Administrator_User')]", - "adminPassword": "[parameters('Administrator_Password')]", - "windowsConfiguration": { - "provisionVMAgent": true, - "enableAutomaticUpdates": true - }, - "secrets": [], - "allowExtensionOperations": true - }, - "networkProfile": { - "networkInterfaces": [{ - "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('rdssh1nic'))]", - "properties": { - "primary": true - } - }] - }, - "diagnosticsProfile": { - "bootDiagnostics": { - "enabled": true, - "storageUri": "[concat('https://', parameters('BootDiagnostics_Account_Name'), '.blob.core.windows.net/')]" - } - } - }, - "dependsOn": [ - "[resourceId('Microsoft.Network/networkInterfaces', variables('rdssh1nic'))]" - ] - }, - { - "type": "Microsoft.Compute/virtualMachines", - "name": "[parameters('RDS_Session_Host_Desktop_Name')]", - "apiVersion": "2018-06-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "hardwareProfile": { - "vmSize": "[parameters('RDS_Session_Host_Desktop_VM_Size')]" - }, - "storageProfile": { - "imageReference": { - "publisher": "MicrosoftWindowsServer", - "offer": "WindowsServer", - "sku": "2019-Datacenter", - "version": "latest" - }, - "osDisk": { - "osType": "Windows", - "name": "[concat(parameters('RDS_Session_Host_Desktop_Name'),'-OS-DISK')]", - "createOption": "FromImage", - "caching": "ReadWrite", - "writeAcceleratorEnabled": false, - "managedDisk": { - "storageAccountType": "Standard_LRS" - }, - "diskSizeGB": 128 - }, - "dataDisks": [] - }, - "osProfile": { - "computerName": "[parameters('RDS_Session_Host_Desktop_Name')]", - "adminUsername": "[parameters('Administrator_User')]", - "adminPassword": "[parameters('Administrator_Password')]", - "windowsConfiguration": { - "provisionVMAgent": true, - "enableAutomaticUpdates": true - }, - "secrets": [], - "allowExtensionOperations": true - }, - "networkProfile": 
{ - "networkInterfaces": [{ - "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('rdssh2nic'))]", - "properties": { - "primary": true - } - }] - }, - "diagnosticsProfile": { - "bootDiagnostics": { - "enabled": true, - "storageUri": "[concat('https://', parameters('BootDiagnostics_Account_Name'), '.blob.core.windows.net/')]" - } - } - }, - "dependsOn": [ - "[resourceId('Microsoft.Network/networkInterfaces', variables('rdssh2nic'))]" - ] - }, - { - "type": "Microsoft.Network/networkInterfaces", - "name": "[variables('rdssh1nic')]", - "apiVersion": "2018-10-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "ipConfigurations": [{ - "name": "ipconfig1", - "properties": { - "privateIPAddress": "[parameters('RDS_Session_Host_Apps_IP_Address')]", - "privateIPAllocationMethod": "Static", - "subnet": { - "id": "[variables('subnet')]" - }, - "primary": true, - "privateIPAddressVersion": "IPv4" - } - }], - "dnsSettings": { - "dnsServers": [], - "appliedDnsServers": [] - }, - "enableAcceleratedNetworking": false, - "enableIPForwarding": false, - "primary": true, - "tapConfigurations": [] - }, - "dependsOn": [] - }, - { - "type": "Microsoft.Network/networkInterfaces", - "name": "[variables('rdssh2nic')]", - "apiVersion": "2018-10-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "ipConfigurations": [{ - "name": "ipconfig1", - "properties": { - "privateIPAddress": "[parameters('RDS_Session_Host_Desktop_IP_Address')]", - "privateIPAllocationMethod": "Static", - "subnet": { - "id": "[variables('subnet')]" - }, - "primary": true, - "privateIPAddressVersion": "IPv4" - } - }], - "dnsSettings": { - "dnsServers": [], - "appliedDnsServers": [] - }, - "enableAcceleratedNetworking": false, - "enableIPForwarding": false, - "primary": true, - "tapConfigurations": [] - }, - "dependsOn": [] - }, - { - "type": "Microsoft.Network/networkInterfaces", - "name": "[variables('rdsnic')]", - "apiVersion": 
"2018-10-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "ipConfigurations": [{ - "name": "ipconfig1", - "properties": { - "privateIPAddress": "[parameters('RDS_Gateway_IP_Address')]", - "privateIPAllocationMethod": "Static", - "publicIPAddress": { - "id": "[resourceId('Microsoft.Network/publicIPAddresses', variables('rdspip'))]" - }, - "subnet": { - "id": "[variables('subnet')]" - }, - "primary": true, - "privateIPAddressVersion": "IPv4" - } - }], - "dnsSettings": { - "dnsServers": [], - "appliedDnsServers": [] - }, - "enableAcceleratedNetworking": false, - "enableIPForwarding": false, - "networkSecurityGroup": { - "id": "[resourceId(parameters('Virtual_Network_Resource_Group'), 'Microsoft.Network/networkSecurityGroups', parameters('NSG_Gateway_Name'))]" - }, - "primary": true, - "tapConfigurations": [] - }, - "dependsOn": [ - "[resourceId('Microsoft.Network/publicIPAddresses', variables('rdspip'))]" - ] - }, - { - "type": "Microsoft.Network/publicIPAddresses", - "sku": { - "name": "Basic", - "tier": "Regional" - }, - "name": "[variables('rdspip')]", - "apiVersion": "2018-10-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "publicIPAddressVersion": "IPv4", - "publicIPAllocationMethod": "Static", - "idleTimeoutInMinutes": 4, - "ipTags": [] - }, - "dependsOn": [] - }, - { - "type": "Microsoft.Compute/virtualMachines/extensions", - "name": "[concat(parameters('RDS_Gateway_Name'), '/', 'bginfo')]", - "apiVersion": "2018-06-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "autoUpgradeMinorVersion": true, - "publisher": "Microsoft.Compute", - "type": "bginfo", - "typeHandlerVersion": "2.1" - }, - "dependsOn": [ - "[resourceId('Microsoft.Compute/virtualMachines', parameters('RDS_Gateway_Name'))]" - ] - }, - { - "type": "Microsoft.Compute/virtualMachines/extensions", - "name": "[concat(parameters('RDS_Session_Host_Apps_Name'), '/', 'bginfo')]", - "apiVersion": 
"2018-06-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "autoUpgradeMinorVersion": true, - "publisher": "Microsoft.Compute", - "type": "bginfo", - "typeHandlerVersion": "2.1" - }, - "dependsOn": [ - "[resourceId('Microsoft.Compute/virtualMachines', parameters('RDS_Session_Host_Apps_Name'))]" - ] - }, - { - "type": "Microsoft.Compute/virtualMachines/extensions", - "name": "[concat(parameters('RDS_Session_Host_Desktop_Name'), '/', 'bginfo')]", - "apiVersion": "2018-06-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "autoUpgradeMinorVersion": true, - "publisher": "Microsoft.Compute", - "type": "bginfo", - "typeHandlerVersion": "2.1" - }, - "dependsOn": [ - "[resourceId('Microsoft.Compute/virtualMachines', parameters('RDS_Session_Host_Desktop_Name'))]" - ] - }, - { - "apiVersion": "2018-06-01", - "type": "Microsoft.Compute/virtualMachines/extensions", - "name": "[concat(parameters('RDS_Gateway_Name'),'/joindomain')]", - "location": "[resourceGroup().location]", - "dependsOn": [ - "[resourceId('Microsoft.Compute/virtualMachines', parameters('RDS_Gateway_Name'))]", - "[resourceId('Microsoft.Compute/virtualMachines/extensions', parameters('RDS_Gateway_Name'),'bginfo')]" - ], - "properties": { - "publisher": "Microsoft.Compute", - "type": "JsonADDomainExtension", - "typeHandlerVersion": "1.3", - "autoUpgradeMinorVersion": true, - "settings": { - "Name": "[parameters('Domain_Name')]", - "User": "[concat(parameters('Domain_Name'), '\\', parameters('DC_Administrator_User'))]", - "Restart": "true", - "Options": "3" - }, - "protectedSettings": { - "Password": "[parameters('DC_Administrator_Password')]" - } - } - }, - { - "apiVersion": "2018-06-01", - "type": "Microsoft.Compute/virtualMachines/extensions", - "name": "[concat(parameters('RDS_Session_Host_Apps_Name'),'/joindomain')]", - "location": "[resourceGroup().location]", - "dependsOn": [ - "[resourceId('Microsoft.Compute/virtualMachines', 
parameters('RDS_Session_Host_Apps_Name'))]", - "[resourceId('Microsoft.Compute/virtualMachines/extensions', parameters('RDS_Session_Host_Apps_Name'),'bginfo')]" - ], - "properties": { - "publisher": "Microsoft.Compute", - "type": "JsonADDomainExtension", - "typeHandlerVersion": "1.3", - "autoUpgradeMinorVersion": true, - "settings": { - "Name": "[parameters('Domain_Name')]", - "User": "[concat(parameters('Domain_Name'), '\\', parameters('DC_Administrator_User'))]", - "Restart": "true", - "Options": "3" - }, - "protectedSettings": { - "Password": "[parameters('DC_Administrator_Password')]" - } - } - }, - { - "apiVersion": "2018-06-01", - "type": "Microsoft.Compute/virtualMachines/extensions", - "name": "[concat(parameters('RDS_Session_Host_Desktop_Name'),'/joindomain')]", - "location": "[resourceGroup().location]", - "dependsOn": [ - "[resourceId('Microsoft.Compute/virtualMachines', parameters('RDS_Session_Host_Desktop_Name'))]", - "[resourceId('Microsoft.Compute/virtualMachines/extensions', parameters('RDS_Session_Host_Desktop_Name'),'bginfo')]" - ], - "properties": { - "publisher": "Microsoft.Compute", - "type": "JsonADDomainExtension", - "typeHandlerVersion": "1.3", - "autoUpgradeMinorVersion": true, - "settings": { - "Name": "[parameters('Domain_Name')]", - "User": "[concat(parameters('Domain_Name'), '\\', parameters('DC_Administrator_User'))]", - "Restart": "true", - "Options": "3" - }, - "protectedSettings": { - "Password": "[parameters('DC_Administrator_Password')]" - } - } - } - ] +{ + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "Administrator_Password": { + "type": "securestring", + "metadata": { + "description": "Enter name for VM Administrator_Password" + } + }, + "Administrator_User": { + "type": "string", + "metadata": { + "description": "Enter name for VM Administrator" + } + }, + "BootDiagnostics_Account_Name": { + "type": "string", + "metadata": { + 
"description": "Enter name of storage account used for boot diagnostics" + } + }, + "DC_Administrator_Password": { + "type": "securestring", + "metadata": { + "description": "Enter name for DC Administrator Password" + } + }, + "DC_Administrator_User": { + "type": "string", + "metadata": { + "description": "Enter name for DC Administrator" + } + }, + "Domain_Name": { + "type": "string", + "metadata": { + "description": "Enter Domain Name" + } + }, + "NSG_Gateway_Name": { + "type": "string", + "metadata": { + "description": "Enter NSG Gateway Name" + } + }, + "RDS_Gateway_IP_Address": { + "type": "string", + "defaultValue": "10.250.x.250", + "metadata": { + "description": "Enter IP address for RDS Gateway VM, must end in 250" + } + }, + "RDS_Gateway_Name": { + "type": "string", + "metadata": { + "description": "Name of the RDS gateway VM" + } + }, + "RDS_Gateway_VM_Size": { + "type": "string", + "defaultValue": "Standard_B2ms", + "allowedValues": [ + "Standard_B2ms", + "Standard_B8ms", + "Standard_D4s_v3", + "Standard_D32_v3", + "Standard_DS2_v2", + "Standard_DS4_v2", + "Standard_DS14_v2", + "Standard_F8s_v2", + "Standard_F16s_v2", + "Standard_F32s_v2" + ], + "metadata": { + "description": "Select size of RDS Gateway VM" + } + }, + "RDS_Session_Host_Apps_IP_Address": { + "type": "string", + "defaultValue": "10.250.x.249", + "metadata": { + "description": "Enter IP address for RDS apps VM, must end in 249" + } + }, + "RDS_Session_Host_Apps_Name": { + "type": "string", + "metadata": { + "description": "Name of the RDS apps session host VM" + } + }, + "RDS_Session_Host_Apps_VM_Size": { + "type": "string", + "defaultValue": "Standard_B2ms", + "allowedValues": [ + "Standard_B2ms", + "Standard_B8ms", + "Standard_D4s_v3", + "Standard_D32_v3", + "Standard_DS2_v2", + "Standard_DS4_v2", + "Standard_DS14_v2", + "Standard_F8s_v2", + "Standard_F16s_v2", + "Standard_F32s_v2" + ], + "metadata": { + "description": "Select size of RDS apps VM" + } + }, + 
"RDS_Session_Host_Desktop_IP_Address": { + "type": "string", + "defaultValue": "10.250.x.248", + "metadata": { + "description": "Enter IP address for RDS desktop VM, must end in 248" + } + }, + "RDS_Session_Host_Desktop_Name": { + "type": "string", + "metadata": { + "description": "Name of the RDS desktop session host VM" + } + }, + "RDS_Session_Host_Desktop_VM_Size": { + "type": "string", + "defaultValue": "Standard_B2ms", + "allowedValues": [ + "Standard_B2ms", + "Standard_B8ms", + "Standard_D4s_v3", + "Standard_D32_v3", + "Standard_DS2_v2", + "Standard_DS4_v2", + "Standard_DS14_v2", + "Standard_F8s_v2", + "Standard_F16s_v2", + "Standard_F32s_v2" + ], + "metadata": { + "description": "Select size of RDS desktop VM" + } + }, + "RDS_Session_Host_Review_IP_Address": { + "type": "string", + "defaultValue": "10.250.x.248", + "metadata": { + "description": "Enter IP address for RDS review session host VM, must end in 248" + } + }, + "RDS_Session_Host_Review_Name": { + "type": "string", + "metadata": { + "description": "Name of the RDS review session host VM" + } + }, + "RDS_Session_Host_Review_VM_Size": { + "type": "string", + "defaultValue": "Standard_B2ms", + "allowedValues": [ + "Standard_B2ms", + "Standard_B8ms", + "Standard_D4s_v3", + "Standard_D32_v3", + "Standard_DS2_v2", + "Standard_DS4_v2", + "Standard_DS14_v2", + "Standard_F8s_v2", + "Standard_F16s_v2", + "Standard_F32s_v2" + ], + "metadata": { + "description": "Select size of RDS review session host VM" + } + }, + "SRE_ID": { + "type": "string", + "metadata": { + "description": "Enter ID for SRE e.g. 
'testsandbox'" + } + }, + "Virtual_Network_Name": { + "type": "string", + "metadata": { + "description": "Enter name of virtual network to provision this VM" + } + }, + "Virtual_Network_Resource_Group": { + "type": "string", + "metadata": { + "description": "Enter name of resource group that is assoicated with the virtual network above" + } + }, + "Virtual_Network_Subnet": { + "type": "string", + "defaultValue": "Subnet-RDS", + "metadata": { + "description": "Enter name of subnet where you want to provision this VM" + } + } + }, + "variables": { + "rdsnic": "[concat(parameters('RDS_Gateway_Name'),'-','NIC')]", + "rdssh1nic": "[concat(parameters('RDS_Session_Host_Apps_Name'),'-','NIC')]", + "rdssh2nic": "[concat(parameters('RDS_Session_Host_Desktop_Name'),'-','NIC')]", + "rdssh3nic": "[concat(parameters('RDS_Session_Host_Review_Name'),'-','NIC')]", + "rdspip": "[concat(parameters('RDS_Gateway_Name'),'-','PIP')]", + "vnetID": "[resourceId(parameters('Virtual_Network_Resource_Group'), 'Microsoft.Network/virtualNetworks', parameters('Virtual_Network_Name'))]", + "subnet": "[concat(variables('vnetID'),'/subnets/', parameters('Virtual_Network_Subnet'))]" + }, + "resources": [{ + "type": "Microsoft.Compute/virtualMachines", + "name": "[parameters('RDS_Gateway_Name')]", + "apiVersion": "2018-06-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "hardwareProfile": { + "vmSize": "[parameters('RDS_Gateway_VM_Size')]" + }, + "storageProfile": { + "imageReference": { + "publisher": "MicrosoftWindowsServer", + "offer": "WindowsServer", + "sku": "2019-Datacenter", + "version": "latest" + }, + "osDisk": { + "osType": "Windows", + "name": "[concat(parameters('RDS_Gateway_Name'),'-OS-DISK')]", + "createOption": "FromImage", + "caching": "ReadWrite", + "writeAcceleratorEnabled": false, + "managedDisk": { + "storageAccountType": "Standard_LRS" + }, + "diskSizeGB": 128 + }, + "dataDisks": [{ + "lun": 0, + "name": 
"[concat(parameters('RDS_Gateway_Name'),'-DATA-DISK-1')]", + "createOption": "Empty", + "caching": "None", + "writeAcceleratorEnabled": false, + "managedDisk": { + "storageAccountType": "Standard_LRS" + }, + "diskSizeGB": 511 + }, + { + "lun": 1, + "name": "[concat(parameters('RDS_Gateway_Name'),'-DATA-DISK-2')]", + "createOption": "Empty", + "caching": "None", + "writeAcceleratorEnabled": false, + "managedDisk": { + "storageAccountType": "Standard_LRS" + }, + "diskSizeGB": 511 + }, + { + "lun": 1, + "name": "[concat(parameters('RDS_Gateway_Name'),'-DATA-DISK-3')]", + "createOption": "Empty", + "caching": "None", + "writeAcceleratorEnabled": false, + "managedDisk": { + "storageAccountType": "Standard_LRS" + }, + "diskSizeGB": 511 + } + ] + }, + "osProfile": { + "computerName": "[parameters('RDS_Gateway_Name')]", + "adminUsername": "[parameters('Administrator_User')]", + "adminPassword": "[parameters('Administrator_Password')]", + "windowsConfiguration": { + "provisionVMAgent": true, + "enableAutomaticUpdates": true + }, + "secrets": [], + "allowExtensionOperations": true + }, + "networkProfile": { + "networkInterfaces": [{ + "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('rdsnic'))]", + "properties": { + "primary": true + } + }] + }, + "diagnosticsProfile": { + "bootDiagnostics": { + "enabled": true, + "storageUri": "[concat('https://', parameters('BootDiagnostics_Account_Name'), '.blob.core.windows.net/')]" + } + } + }, + "dependsOn": [ + "[resourceId('Microsoft.Network/networkInterfaces', variables('rdsnic'))]" + ] + }, + { + "type": "Microsoft.Compute/virtualMachines", + "name": "[parameters('RDS_Session_Host_Apps_Name')]", + "apiVersion": "2018-06-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "hardwareProfile": { + "vmSize": "[parameters('RDS_Session_Host_Apps_VM_Size')]" + }, + "storageProfile": { + "imageReference": { + "publisher": "MicrosoftWindowsServer", + "offer": "WindowsServer", + "sku": 
"2019-Datacenter", + "version": "latest" + }, + "osDisk": { + "osType": "Windows", + "name": "[concat(parameters('RDS_Session_Host_Apps_Name'),'-OS-DISK')]", + "createOption": "FromImage", + "caching": "ReadWrite", + "writeAcceleratorEnabled": false, + "managedDisk": { + "storageAccountType": "Standard_LRS" + }, + "diskSizeGB": 128 + }, + "dataDisks": [] + }, + "osProfile": { + "computerName": "[parameters('RDS_Session_Host_Apps_Name')]", + "adminUsername": "[parameters('Administrator_User')]", + "adminPassword": "[parameters('Administrator_Password')]", + "windowsConfiguration": { + "provisionVMAgent": true, + "enableAutomaticUpdates": true + }, + "secrets": [], + "allowExtensionOperations": true + }, + "networkProfile": { + "networkInterfaces": [{ + "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('rdssh1nic'))]", + "properties": { + "primary": true + } + }] + }, + "diagnosticsProfile": { + "bootDiagnostics": { + "enabled": true, + "storageUri": "[concat('https://', parameters('BootDiagnostics_Account_Name'), '.blob.core.windows.net/')]" + } + } + }, + "dependsOn": [ + "[resourceId('Microsoft.Network/networkInterfaces', variables('rdssh1nic'))]" + ] + }, + { + "type": "Microsoft.Compute/virtualMachines", + "name": "[parameters('RDS_Session_Host_Desktop_Name')]", + "apiVersion": "2018-06-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "hardwareProfile": { + "vmSize": "[parameters('RDS_Session_Host_Desktop_VM_Size')]" + }, + "storageProfile": { + "imageReference": { + "publisher": "MicrosoftWindowsServer", + "offer": "WindowsServer", + "sku": "2019-Datacenter", + "version": "latest" + }, + "osDisk": { + "osType": "Windows", + "name": "[concat(parameters('RDS_Session_Host_Desktop_Name'),'-OS-DISK')]", + "createOption": "FromImage", + "caching": "ReadWrite", + "writeAcceleratorEnabled": false, + "managedDisk": { + "storageAccountType": "Standard_LRS" + }, + "diskSizeGB": 128 + }, + "dataDisks": [] + }, + 
"osProfile": { + "computerName": "[parameters('RDS_Session_Host_Desktop_Name')]", + "adminUsername": "[parameters('Administrator_User')]", + "adminPassword": "[parameters('Administrator_Password')]", + "windowsConfiguration": { + "provisionVMAgent": true, + "enableAutomaticUpdates": true + }, + "secrets": [], + "allowExtensionOperations": true + }, + "networkProfile": { + "networkInterfaces": [{ + "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('rdssh2nic'))]", + "properties": { + "primary": true + } + }] + }, + "diagnosticsProfile": { + "bootDiagnostics": { + "enabled": true, + "storageUri": "[concat('https://', parameters('BootDiagnostics_Account_Name'), '.blob.core.windows.net/')]" + } + } + }, + "dependsOn": [ + "[resourceId('Microsoft.Network/networkInterfaces', variables('rdssh2nic'))]" + ] + }, + { + "type": "Microsoft.Compute/virtualMachines", + "name": "[parameters('RDS_Session_Host_Review_Name')]", + "apiVersion": "2018-06-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "hardwareProfile": { + "vmSize": "[parameters('RDS_Session_Host_Review_VM_Size')]" + }, + "storageProfile": { + "imageReference": { + "publisher": "MicrosoftWindowsServer", + "offer": "WindowsServer", + "sku": "2019-Datacenter", + "version": "latest" + }, + "osDisk": { + "osType": "Windows", + "name": "[concat(parameters('RDS_Session_Host_Review_Name'),'-OS-DISK')]", + "createOption": "FromImage", + "caching": "ReadWrite", + "writeAcceleratorEnabled": false, + "managedDisk": { + "storageAccountType": "Standard_LRS" + }, + "diskSizeGB": 128 + }, + "dataDisks": [] + }, + "osProfile": { + "computerName": "[parameters('RDS_Session_Host_Review_Name')]", + "adminUsername": "[parameters('Administrator_User')]", + "adminPassword": "[parameters('Administrator_Password')]", + "windowsConfiguration": { + "provisionVMAgent": true, + "enableAutomaticUpdates": true + }, + "secrets": [], + "allowExtensionOperations": true + }, + "networkProfile": 
{ + "networkInterfaces": [{ + "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('rdssh3nic'))]", + "properties": { + "primary": true + } + }] + }, + "diagnosticsProfile": { + "bootDiagnostics": { + "enabled": true, + "storageUri": "[concat('https://', parameters('BootDiagnostics_Account_Name'), '.blob.core.windows.net/')]" + } + } + }, + "dependsOn": [ + "[resourceId('Microsoft.Network/networkInterfaces', variables('rdssh3nic'))]" + ] + }, + { + "type": "Microsoft.Network/networkInterfaces", + "name": "[variables('rdssh1nic')]", + "apiVersion": "2018-10-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "ipConfigurations": [{ + "name": "ipconfig1", + "properties": { + "privateIPAddress": "[parameters('RDS_Session_Host_Apps_IP_Address')]", + "privateIPAllocationMethod": "Static", + "subnet": { + "id": "[variables('subnet')]" + }, + "primary": true, + "privateIPAddressVersion": "IPv4" + } + }], + "dnsSettings": { + "dnsServers": [], + "appliedDnsServers": [] + }, + "enableAcceleratedNetworking": false, + "enableIPForwarding": false, + "primary": true, + "tapConfigurations": [] + }, + "dependsOn": [] + }, + { + "type": "Microsoft.Network/networkInterfaces", + "name": "[variables('rdssh2nic')]", + "apiVersion": "2018-10-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "ipConfigurations": [{ + "name": "ipconfig1", + "properties": { + "privateIPAddress": "[parameters('RDS_Session_Host_Desktop_IP_Address')]", + "privateIPAllocationMethod": "Static", + "subnet": { + "id": "[variables('subnet')]" + }, + "primary": true, + "privateIPAddressVersion": "IPv4" + } + }], + "dnsSettings": { + "dnsServers": [], + "appliedDnsServers": [] + }, + "enableAcceleratedNetworking": false, + "enableIPForwarding": false, + "primary": true, + "tapConfigurations": [] + }, + "dependsOn": [] + }, + { + "type": "Microsoft.Network/networkInterfaces", + "name": "[variables('rdssh3nic')]", + "apiVersion": 
"2018-10-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "ipConfigurations": [{ + "name": "ipconfig1", + "properties": { + "privateIPAddress": "[parameters('RDS_Session_Host_Review_IP_Address')]", + "privateIPAllocationMethod": "Static", + "subnet": { + "id": "[variables('subnet')]" + }, + "primary": true, + "privateIPAddressVersion": "IPv4" + } + }], + "dnsSettings": { + "dnsServers": [], + "appliedDnsServers": [] + }, + "enableAcceleratedNetworking": false, + "enableIPForwarding": false, + "primary": true, + "tapConfigurations": [] + }, + "dependsOn": [] + }, + { + "type": "Microsoft.Network/networkInterfaces", + "name": "[variables('rdsnic')]", + "apiVersion": "2018-10-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "ipConfigurations": [{ + "name": "ipconfig1", + "properties": { + "privateIPAddress": "[parameters('RDS_Gateway_IP_Address')]", + "privateIPAllocationMethod": "Static", + "publicIPAddress": { + "id": "[resourceId('Microsoft.Network/publicIPAddresses', variables('rdspip'))]" + }, + "subnet": { + "id": "[variables('subnet')]" + }, + "primary": true, + "privateIPAddressVersion": "IPv4" + } + }], + "dnsSettings": { + "dnsServers": [], + "appliedDnsServers": [] + }, + "enableAcceleratedNetworking": false, + "enableIPForwarding": false, + "networkSecurityGroup": { + "id": "[resourceId(parameters('Virtual_Network_Resource_Group'), 'Microsoft.Network/networkSecurityGroups', parameters('NSG_Gateway_Name'))]" + }, + "primary": true, + "tapConfigurations": [] + }, + "dependsOn": [ + "[resourceId('Microsoft.Network/publicIPAddresses', variables('rdspip'))]" + ] + }, + { + "type": "Microsoft.Network/publicIPAddresses", + "sku": { + "name": "Basic", + "tier": "Regional" + }, + "name": "[variables('rdspip')]", + "apiVersion": "2018-10-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "publicIPAddressVersion": "IPv4", + "publicIPAllocationMethod": 
"Static", + "idleTimeoutInMinutes": 4, + "ipTags": [] + }, + "dependsOn": [] + }, + { + "type": "Microsoft.Compute/virtualMachines/extensions", + "name": "[concat(parameters('RDS_Gateway_Name'), '/', 'bginfo')]", + "apiVersion": "2018-06-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "autoUpgradeMinorVersion": true, + "publisher": "Microsoft.Compute", + "type": "bginfo", + "typeHandlerVersion": "2.1" + }, + "dependsOn": [ + "[resourceId('Microsoft.Compute/virtualMachines', parameters('RDS_Gateway_Name'))]" + ] + }, + { + "type": "Microsoft.Compute/virtualMachines/extensions", + "name": "[concat(parameters('RDS_Session_Host_Apps_Name'), '/', 'bginfo')]", + "apiVersion": "2018-06-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "autoUpgradeMinorVersion": true, + "publisher": "Microsoft.Compute", + "type": "bginfo", + "typeHandlerVersion": "2.1" + }, + "dependsOn": [ + "[resourceId('Microsoft.Compute/virtualMachines', parameters('RDS_Session_Host_Apps_Name'))]" + ] + }, + { + "type": "Microsoft.Compute/virtualMachines/extensions", + "name": "[concat(parameters('RDS_Session_Host_Desktop_Name'), '/', 'bginfo')]", + "apiVersion": "2018-06-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "autoUpgradeMinorVersion": true, + "publisher": "Microsoft.Compute", + "type": "bginfo", + "typeHandlerVersion": "2.1" + }, + "dependsOn": [ + "[resourceId('Microsoft.Compute/virtualMachines', parameters('RDS_Session_Host_Desktop_Name'))]" + ] + }, + { + "type": "Microsoft.Compute/virtualMachines/extensions", + "name": "[concat(parameters('RDS_Session_Host_Review_Name'), '/', 'bginfo')]", + "apiVersion": "2018-06-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "autoUpgradeMinorVersion": true, + "publisher": "Microsoft.Compute", + "type": "bginfo", + "typeHandlerVersion": "2.1" + }, + "dependsOn": [ + 
"[resourceId('Microsoft.Compute/virtualMachines', parameters('RDS_Session_Host_Review_Name'))]" + ] + }, + { + "apiVersion": "2018-06-01", + "type": "Microsoft.Compute/virtualMachines/extensions", + "name": "[concat(parameters('RDS_Gateway_Name'),'/joindomain')]", + "location": "[resourceGroup().location]", + "dependsOn": [ + "[resourceId('Microsoft.Compute/virtualMachines', parameters('RDS_Gateway_Name'))]", + "[resourceId('Microsoft.Compute/virtualMachines/extensions', parameters('RDS_Gateway_Name'),'bginfo')]" + ], + "properties": { + "publisher": "Microsoft.Compute", + "type": "JsonADDomainExtension", + "typeHandlerVersion": "1.3", + "autoUpgradeMinorVersion": true, + "settings": { + "Name": "[parameters('Domain_Name')]", + "User": "[concat(parameters('Domain_Name'), '\\', parameters('DC_Administrator_User'))]", + "Restart": "true", + "Options": "3" + }, + "protectedSettings": { + "Password": "[parameters('DC_Administrator_Password')]" + } + } + }, + { + "apiVersion": "2018-06-01", + "type": "Microsoft.Compute/virtualMachines/extensions", + "name": "[concat(parameters('RDS_Session_Host_Apps_Name'),'/joindomain')]", + "location": "[resourceGroup().location]", + "dependsOn": [ + "[resourceId('Microsoft.Compute/virtualMachines', parameters('RDS_Session_Host_Apps_Name'))]", + "[resourceId('Microsoft.Compute/virtualMachines/extensions', parameters('RDS_Session_Host_Apps_Name'),'bginfo')]" + ], + "properties": { + "publisher": "Microsoft.Compute", + "type": "JsonADDomainExtension", + "typeHandlerVersion": "1.3", + "autoUpgradeMinorVersion": true, + "settings": { + "Name": "[parameters('Domain_Name')]", + "User": "[concat(parameters('Domain_Name'), '\\', parameters('DC_Administrator_User'))]", + "Restart": "true", + "Options": "3" + }, + "protectedSettings": { + "Password": "[parameters('DC_Administrator_Password')]" + } + } + }, + { + "apiVersion": "2018-06-01", + "type": "Microsoft.Compute/virtualMachines/extensions", + "name": 
"[concat(parameters('RDS_Session_Host_Desktop_Name'),'/joindomain')]", + "location": "[resourceGroup().location]", + "dependsOn": [ + "[resourceId('Microsoft.Compute/virtualMachines', parameters('RDS_Session_Host_Desktop_Name'))]", + "[resourceId('Microsoft.Compute/virtualMachines/extensions', parameters('RDS_Session_Host_Desktop_Name'),'bginfo')]" + ], + "properties": { + "publisher": "Microsoft.Compute", + "type": "JsonADDomainExtension", + "typeHandlerVersion": "1.3", + "autoUpgradeMinorVersion": true, + "settings": { + "Name": "[parameters('Domain_Name')]", + "User": "[concat(parameters('Domain_Name'), '\\', parameters('DC_Administrator_User'))]", + "Restart": "true", + "Options": "3" + }, + "protectedSettings": { + "Password": "[parameters('DC_Administrator_Password')]" + } + } + }, + { + "apiVersion": "2018-06-01", + "type": "Microsoft.Compute/virtualMachines/extensions", + "name": "[concat(parameters('RDS_Session_Host_Review_Name'),'/joindomain')]", + "location": "[resourceGroup().location]", + "dependsOn": [ + "[resourceId('Microsoft.Compute/virtualMachines', parameters('RDS_Session_Host_Review_Name'))]", + "[resourceId('Microsoft.Compute/virtualMachines/extensions', parameters('RDS_Session_Host_Review_Name'),'bginfo')]" + ], + "properties": { + "publisher": "Microsoft.Compute", + "type": "JsonADDomainExtension", + "typeHandlerVersion": "1.3", + "autoUpgradeMinorVersion": true, + "settings": { + "Name": "[parameters('Domain_Name')]", + "User": "[concat(parameters('Domain_Name'), '\\', parameters('DC_Administrator_User'))]", + "Restart": "true", + "Options": "3" + }, + "protectedSettings": { + "Password": "[parameters('DC_Administrator_Password')]" + } + } + }, + ] } \ No newline at end of file diff --git a/deployment/secure_research_environment/arm_templates/sre-webapps-template.json b/deployment/secure_research_environment/arm_templates/sre-webapps-template.json index 20cb13fe2f..52d9b78c88 100644 --- 
a/deployment/secure_research_environment/arm_templates/sre-webapps-template.json +++ b/deployment/secure_research_environment/arm_templates/sre-webapps-template.json @@ -1,299 +1,299 @@ -{ - "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "parameters": { - "Administrator_Password": { - "type": "securestring", - "metadata": { - "description": "Enter name for VM Administrator_Password" - } - }, - "Administrator_User": { - "type": "string", - "metadata": { - "description": "Enter name for VM Administrator" - } - }, - "BootDiagnostics_Account_Name": { - "type": "string", - "metadata": { - "description": "Enter name of storage account used for boot diagnostics" - } - }, - "GitLab_Cloud_Init": { - "type": "string", - "defaultValue": "", - "metadata": { - "description": "String passed down to the Virtual Machine." - } - }, - "GitLab_IP_Address": { - "type": "string", - "defaultValue": "10.250.x.153", - "metadata": { - "description": "Enter IP address for VM, must end in 153" - } - }, - "GitLab_Server_Name": { - "defaultValue": "GITLAB", - "type": "string" - }, - "GitLab_VM_Size": { - "type": "string", - "defaultValue": "Standard_B2ms", - "allowedValues": [ - "Standard_B2ms", - "Standard_D2s_v3", - "Standard_DS2_v2", - "Standard_DS3_v2", - "Standard_F4s_v2" - ], - "metadata": { - "description": "Select size of VM" - } - }, - "HackMD_Cloud_Init": { - "type": "string", - "defaultValue": "", - "metadata": { - "description": "String passed down to the Virtual Machine." 
- } - }, - "HackMD_IP_Address": { - "type": "string", - "defaultValue": "10.250.x.154", - "metadata": { - "description": "Enter IP address for VM, must end in 154" - } - }, - "HackMD_Server_Name": { - "defaultValue": "HACKMD", - "type": "string" - }, - "HackMD_VM_Size": { - "type": "string", - "defaultValue": "Standard_B2ms", - "allowedValues": [ - "Standard_B2ms", - "Standard_D2s_v3", - "Standard_DS2_v2", - "Standard_DS3_v2", - "Standard_F4s_v2" - ], - "metadata": { - "description": "Select size of VM" - } - }, - "Virtual_Network_Name": { - "type": "string", - "defaultValue": "DSG_DSGROUPX_VNET1", - "metadata": { - "description": "Enter name of virtual network to provision this VM" - } - }, - "Virtual_Network_Resource_Group": { - "type": "string", - "metadata": { - "description": "Enter name of resource group that is associated with the virtual network above" - } - }, - "Virtual_Network_Subnet": { - "type": "string", - "defaultValue": "Subnet-Data", - "metadata": { - "description": "Enter name of subnet where you want to provision this VM" - } - } - }, - "variables": { - "gitlabnic": "[concat(parameters('GitLab_Server_Name'),'-','NIC')]", - "hackmdnic": "[concat(parameters('HackMD_Server_Name'),'-','NIC')]", - "vnetID": "[resourceId(parameters('Virtual_Network_Resource_Group'), 'Microsoft.Network/virtualNetworks', parameters('Virtual_Network_Name'))]", - "subnet": "[concat(variables('vnetID'),'/subnets/', parameters('Virtual_Network_Subnet'))]" - }, - "resources": [{ - "type": "Microsoft.Compute/virtualMachines", - "name": "[parameters('GitLab_Server_Name')]", - "apiVersion": "2018-06-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "hardwareProfile": { - "vmSize": "[parameters('GitLab_VM_Size')]" - }, - "storageProfile": { - "imageReference": { - "publisher": "Canonical", - "offer": "UbuntuServer", - "sku": "18.04-LTS", - "version": "latest" - }, - "osDisk": { - "osType": "Linux", - "name": 
"[concat(parameters('GitLab_Server_Name'),'-OS-DISK')]", - "createOption": "FromImage", - "caching": "ReadWrite", - "writeAcceleratorEnabled": false, - "managedDisk": { - "storageAccountType": "Standard_LRS" - }, - "diskSizeGB": 50 - }, - "dataDisks": [{ - "lun": 0, - "name": "[concat(parameters('GitLab_Server_Name'),'-DATA-DISK')]", - "createOption": "Empty", - "caching": "None", - "writeAcceleratorEnabled": false, - "managedDisk": { - "storageAccountType": "Standard_LRS" - }, - "diskSizeGB": 750 - }] - }, - "osProfile": { - "computerName": "[parameters('GitLab_Server_Name')]", - "adminUsername": "[parameters('Administrator_User')]", - "adminPassword": "[parameters('Administrator_Password')]", - "linuxConfiguration": { - "disablePasswordAuthentication": false, - "provisionVMAgent": true - }, - "secrets": [], - "allowExtensionOperations": true, - "customData": "[parameters('GitLab_Cloud_Init')]" - }, - "networkProfile": { - "networkInterfaces": [{ - "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('gitlabnic'))]", - "properties": { - "primary": true - } - }] - }, - "diagnosticsProfile": { - "bootDiagnostics": { - "enabled": true, - "storageUri": "[concat('https', '://', parameters('BootDiagnostics_Account_Name'), '.blob.core.windows.net', '/')]" - } - } - }, - "dependsOn": [ - "[resourceId('Microsoft.Network/networkInterfaces', variables('gitlabnic'))]" - ] - }, - { - "type": "Microsoft.Compute/virtualMachines", - "name": "[parameters('HackMD_Server_Name')]", - "apiVersion": "2018-06-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "hardwareProfile": { - "vmSize": "[parameters('HackMD_VM_Size')]" - }, - "storageProfile": { - "imageReference": { - "publisher": "Canonical", - "offer": "UbuntuServer", - "sku": "18.04-LTS", - "version": "latest" - }, - "osDisk": { - "osType": "Linux", - "name": "[concat(parameters('HackMD_Server_Name'),'-OS-DISK')]", - "createOption": "FromImage", - "caching": "ReadWrite", - 
"writeAcceleratorEnabled": false, - "managedDisk": { - "storageAccountType": "Standard_LRS" - }, - "diskSizeGB": 750 - }, - "dataDisks": [] - }, - "osProfile": { - "computerName": "[parameters('HackMD_Server_Name')]", - "adminUsername": "[parameters('Administrator_User')]", - "adminPassword": "[parameters('Administrator_Password')]", - "linuxConfiguration": { - "disablePasswordAuthentication": false, - "provisionVMAgent": true - }, - "secrets": [], - "allowExtensionOperations": true, - "customData": "[parameters('HackMD_Cloud_Init')]" - }, - "networkProfile": { - "networkInterfaces": [{ - "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('hackmdnic'))]", - "properties": { - "primary": true - } - }] - }, - "diagnosticsProfile": { - "bootDiagnostics": { - "enabled": true, - "storageUri": "[concat('https', '://', parameters('BootDiagnostics_Account_Name'), '.blob.core.windows.net', '/')]" - } - } - }, - "dependsOn": [ - "[resourceId('Microsoft.Network/networkInterfaces', variables('hackmdnic'))]" - ] - }, - { - "type": "Microsoft.Network/networkInterfaces", - "name": "[variables('gitlabnic')]", - "apiVersion": "2018-10-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "ipConfigurations": [{ - "name": "ipconfig1", - "properties": { - "privateIPAddress": "[parameters('GitLab_IP_Address')]", - "privateIPAllocationMethod": "Static", - "subnet": { - "id": "[variables('subnet')]" - }, - "primary": true, - "privateIPAddressVersion": "IPv4" - } - }], - "enableAcceleratedNetworking": false, - "enableIPForwarding": false, - "primary": true, - "tapConfigurations": [] - }, - "dependsOn": [] - }, - { - "type": "Microsoft.Network/networkInterfaces", - "name": "[variables('hackmdnic')]", - "apiVersion": "2018-10-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "ipConfigurations": [{ - "name": "ipconfig1", - "properties": { - "privateIPAddress": "[parameters('HackMD_IP_Address')]", - 
"privateIPAllocationMethod": "Static", - "subnet": { - "id": "[variables('subnet')]" - }, - "primary": true, - "privateIPAddressVersion": "IPv4" - } - }], - "enableAcceleratedNetworking": false, - "enableIPForwarding": false, - "primary": true, - "tapConfigurations": [] - }, - "dependsOn": [] - } - ] +{ + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "Administrator_Password": { + "type": "securestring", + "metadata": { + "description": "Enter name for VM Administrator_Password" + } + }, + "Administrator_User": { + "type": "string", + "metadata": { + "description": "Enter name for VM Administrator" + } + }, + "BootDiagnostics_Account_Name": { + "type": "string", + "metadata": { + "description": "Enter name of storage account used for boot diagnostics" + } + }, + "GitLab_Cloud_Init": { + "type": "string", + "defaultValue": "", + "metadata": { + "description": "String passed down to the Virtual Machine." + } + }, + "GitLab_IP_Address": { + "type": "string", + "defaultValue": "10.250.x.153", + "metadata": { + "description": "Enter IP address for VM, must end in 153" + } + }, + "GitLab_Server_Name": { + "defaultValue": "GITLAB", + "type": "string" + }, + "GitLab_VM_Size": { + "type": "string", + "defaultValue": "Standard_B2ms", + "allowedValues": [ + "Standard_B2ms", + "Standard_D2s_v3", + "Standard_DS2_v2", + "Standard_DS3_v2", + "Standard_F4s_v2" + ], + "metadata": { + "description": "Select size of VM" + } + }, + "HackMD_Cloud_Init": { + "type": "string", + "defaultValue": "", + "metadata": { + "description": "String passed down to the Virtual Machine." 
+ } + }, + "HackMD_IP_Address": { + "type": "string", + "defaultValue": "10.250.x.154", + "metadata": { + "description": "Enter IP address for VM, must end in 154" + } + }, + "HackMD_Server_Name": { + "defaultValue": "HACKMD", + "type": "string" + }, + "HackMD_VM_Size": { + "type": "string", + "defaultValue": "Standard_B2ms", + "allowedValues": [ + "Standard_B2ms", + "Standard_D2s_v3", + "Standard_DS2_v2", + "Standard_DS3_v2", + "Standard_F4s_v2" + ], + "metadata": { + "description": "Select size of VM" + } + }, + "Virtual_Network_Name": { + "type": "string", + "defaultValue": "DSG_DSGROUPX_VNET1", + "metadata": { + "description": "Enter name of virtual network to provision this VM" + } + }, + "Virtual_Network_Resource_Group": { + "type": "string", + "metadata": { + "description": "Enter name of resource group that is associated with the virtual network above" + } + }, + "Virtual_Network_Subnet": { + "type": "string", + "defaultValue": "Subnet-Data", + "metadata": { + "description": "Enter name of subnet where you want to provision this VM" + } + } + }, + "variables": { + "gitlabnic": "[concat(parameters('GitLab_Server_Name'),'-','NIC')]", + "hackmdnic": "[concat(parameters('HackMD_Server_Name'),'-','NIC')]", + "vnetID": "[resourceId(parameters('Virtual_Network_Resource_Group'), 'Microsoft.Network/virtualNetworks', parameters('Virtual_Network_Name'))]", + "subnet": "[concat(variables('vnetID'),'/subnets/', parameters('Virtual_Network_Subnet'))]" + }, + "resources": [{ + "type": "Microsoft.Compute/virtualMachines", + "name": "[parameters('GitLab_Server_Name')]", + "apiVersion": "2018-06-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "hardwareProfile": { + "vmSize": "[parameters('GitLab_VM_Size')]" + }, + "storageProfile": { + "imageReference": { + "publisher": "Canonical", + "offer": "UbuntuServer", + "sku": "18.04-LTS", + "version": "latest" + }, + "osDisk": { + "osType": "Linux", + "name": 
"[concat(parameters('GitLab_Server_Name'),'-OS-DISK')]", + "createOption": "FromImage", + "caching": "ReadWrite", + "writeAcceleratorEnabled": false, + "managedDisk": { + "storageAccountType": "Standard_LRS" + }, + "diskSizeGB": 50 + }, + "dataDisks": [{ + "lun": 0, + "name": "[concat(parameters('GitLab_Server_Name'),'-DATA-DISK')]", + "createOption": "Empty", + "caching": "None", + "writeAcceleratorEnabled": false, + "managedDisk": { + "storageAccountType": "Standard_LRS" + }, + "diskSizeGB": 750 + }] + }, + "osProfile": { + "computerName": "[parameters('GitLab_Server_Name')]", + "adminUsername": "[parameters('Administrator_User')]", + "adminPassword": "[parameters('Administrator_Password')]", + "linuxConfiguration": { + "disablePasswordAuthentication": false, + "provisionVMAgent": true + }, + "secrets": [], + "allowExtensionOperations": true, + "customData": "[parameters('GitLab_Cloud_Init')]" + }, + "networkProfile": { + "networkInterfaces": [{ + "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('gitlabnic'))]", + "properties": { + "primary": true + } + }] + }, + "diagnosticsProfile": { + "bootDiagnostics": { + "enabled": true, + "storageUri": "[concat('https', '://', parameters('BootDiagnostics_Account_Name'), '.blob.core.windows.net', '/')]" + } + } + }, + "dependsOn": [ + "[resourceId('Microsoft.Network/networkInterfaces', variables('gitlabnic'))]" + ] + }, + { + "type": "Microsoft.Compute/virtualMachines", + "name": "[parameters('HackMD_Server_Name')]", + "apiVersion": "2018-06-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "hardwareProfile": { + "vmSize": "[parameters('HackMD_VM_Size')]" + }, + "storageProfile": { + "imageReference": { + "publisher": "Canonical", + "offer": "UbuntuServer", + "sku": "18.04-LTS", + "version": "latest" + }, + "osDisk": { + "osType": "Linux", + "name": "[concat(parameters('HackMD_Server_Name'),'-OS-DISK')]", + "createOption": "FromImage", + "caching": "ReadWrite", + 
"writeAcceleratorEnabled": false, + "managedDisk": { + "storageAccountType": "Standard_LRS" + }, + "diskSizeGB": 750 + }, + "dataDisks": [] + }, + "osProfile": { + "computerName": "[parameters('HackMD_Server_Name')]", + "adminUsername": "[parameters('Administrator_User')]", + "adminPassword": "[parameters('Administrator_Password')]", + "linuxConfiguration": { + "disablePasswordAuthentication": false, + "provisionVMAgent": true + }, + "secrets": [], + "allowExtensionOperations": true, + "customData": "[parameters('HackMD_Cloud_Init')]" + }, + "networkProfile": { + "networkInterfaces": [{ + "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('hackmdnic'))]", + "properties": { + "primary": true + } + }] + }, + "diagnosticsProfile": { + "bootDiagnostics": { + "enabled": true, + "storageUri": "[concat('https', '://', parameters('BootDiagnostics_Account_Name'), '.blob.core.windows.net', '/')]" + } + } + }, + "dependsOn": [ + "[resourceId('Microsoft.Network/networkInterfaces', variables('hackmdnic'))]" + ] + }, + { + "type": "Microsoft.Network/networkInterfaces", + "name": "[variables('gitlabnic')]", + "apiVersion": "2018-10-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "ipConfigurations": [{ + "name": "ipconfig1", + "properties": { + "privateIPAddress": "[parameters('GitLab_IP_Address')]", + "privateIPAllocationMethod": "Static", + "subnet": { + "id": "[variables('subnet')]" + }, + "primary": true, + "privateIPAddressVersion": "IPv4" + } + }], + "enableAcceleratedNetworking": false, + "enableIPForwarding": false, + "primary": true, + "tapConfigurations": [] + }, + "dependsOn": [] + }, + { + "type": "Microsoft.Network/networkInterfaces", + "name": "[variables('hackmdnic')]", + "apiVersion": "2018-10-01", + "location": "[resourceGroup().location]", + "scale": null, + "properties": { + "ipConfigurations": [{ + "name": "ipconfig1", + "properties": { + "privateIPAddress": "[parameters('HackMD_IP_Address')]", + 
"privateIPAllocationMethod": "Static", + "subnet": { + "id": "[variables('subnet')]" + }, + "primary": true, + "privateIPAddressVersion": "IPv4" + } + }], + "enableAcceleratedNetworking": false, + "enableIPForwarding": false, + "primary": true, + "tapConfigurations": [] + }, + "dependsOn": [] + } + ] } \ No newline at end of file diff --git a/deployment/secure_research_environment/remote/create_rds/scripts/Move_RDS_VMs_Into_OUs.ps1 b/deployment/secure_research_environment/remote/create_rds/scripts/Move_RDS_VMs_Into_OUs.ps1 index ab517502aa..a4ef9b595a 100644 --- a/deployment/secure_research_environment/remote/create_rds/scripts/Move_RDS_VMs_Into_OUs.ps1 +++ b/deployment/secure_research_environment/remote/create_rds/scripts/Move_RDS_VMs_Into_OUs.ps1 @@ -5,14 +5,16 @@ # job, but this does not seem to have an immediate effect # For details, see https://docs.microsoft.com/en-gb/azure/virtual-machines/windows/run-command param( - [Parameter(Position=0, HelpMessage = "SHM DN")] - [string]$shmDn, - [Parameter(Position=1, HelpMessage = "RDS Gateway hostname")] - [string]$gatewayHostname, - [Parameter(Position=2, HelpMessage = "RDS Session Host 1 hostname")] - [string]$sh1Hostname, - [Parameter(Position=3, HelpMessage = "RDS Session Host 2 hostname")] - [string]$sh2Hostname + [Parameter(HelpMessage = "SHM DN")] + [string]$shmDn, + [Parameter(HelpMessage = "RDS Gateway hostname")] + [string]$gatewayHostname, + [Parameter(HelpMessage = "RDS Session Host 1 hostname")] + [string]$sh1Hostname, + [Parameter(HelpMessage = "RDS Session Host 2 hostname")] + [string]$sh2Hostname, + [Parameter(HelpMessage = "RDS Session Host 3 hostname")] + [string]$sh3Hostname ) $gatewayTargetPath = "OU=Secure Research Environment Service Servers,$shmDn" @@ -21,23 +23,17 @@ $shTargetPath = "OU=Secure Research Environment RDS Session Servers,$shmDn" Write-Output " [ ] Moving '$gatewayHostname' to '$gatewayTargetPath'" Move-ADObject (Get-ADComputer -Identity $gatewayHostname) -TargetPath 
"$gatewayTargetPath" if ($?) { - Write-Host " [o] Completed" + Write-Output " [o] Completed" } else { - Write-Host " [x] Failed" + Write-Output " [x] Failed" } -Write-Output " [ ] Moving '$sh1Hostname' to '$shTargetPath'" -Move-ADObject (Get-ADComputer -Identity $sh1Hostname) -TargetPath "$shTargetPath" -if ($?) { - Write-Host " [o] Completed" -} else { - Write-Host " [x] Failed" +foreach ($sessionServerName in @($sh1Hostname, $sh2Hostname, $sh3Hostname)) { + Write-Output " [ ] Moving '$sessionServerName' to '$shTargetPath'" + Move-ADObject (Get-ADComputer -Identity $sessionServerName) -TargetPath "$shTargetPath" + if ($?) { + Write-Output " [o] Completed" + } else { + Write-Output " [x] Failed" + } } - -Write-Output " [ ] Moving '$sh2Hostname' to '$shTargetPath'" -Move-ADObject (Get-ADComputer -Identity $sh2Hostname) -TargetPath "$shTargetPath" -if ($?) { - Write-Host " [o] Completed" -} else { - Write-Host " [x] Failed" -} \ No newline at end of file diff --git a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 index 9fa8ffe555..78c9ff2138 100644 --- a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 +++ b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 @@ -18,10 +18,10 @@ Start-Service ShellHWDetection # Setup user profile disk shares # ------------------------------ Write-Host -ForegroundColor Cyan "Creating user profile disk shares..." 
-ForEach (`$sharePath in ("F:\AppFileShares", "G:\RDPFileShares")) { +ForEach (`$sharePath in ("F:\AppFileShares", "G:\RDPFileShares", "H:\ReviewFileShares")) { `$_ = New-Item -ItemType Directory -Force -Path `$sharePath if(`$(Get-SmbShare | Where-Object -Property Path -eq `$sharePath) -eq `$null) { - New-SmbShare -Path `$sharePath -Name `$sharePath.Split("\")[1] -FullAccess "$shmNetbiosName\$rdsGatewayVmName$","$shmNetbiosName\$rdsSh1VmName$","$shmNetbiosName\$rdsSh2VmName$","$shmNetbiosName\Domain Admins" + New-SmbShare -Path `$sharePath -Name `$sharePath.Split("\")[1] -FullAccess "$shmNetbiosName\$rdsGatewayVmName$","$shmNetbiosName\$rdsSh1VmName$","$shmNetbiosName\$rdsSh2VmName$","$shmNetbiosName\$rdsSh3VmName$","$shmNetbiosName\Domain Admins" } } @@ -64,6 +64,12 @@ New-RDSessionCollection -CollectionName "`$collectionName" -SessionHost $rdsSh2V Set-RDSessionCollectionConfiguration -CollectionName "`$collectionName" -UserGroup "$shmNetbiosName\SG $sreNetbiosName Research Users" -ClientPrinterRedirected `$false -ClientDeviceRedirectionOptions None -DisconnectedSessionLimitMin 5 -IdleSessionLimitMin 720 -ConnectionBroker $rdsGatewayVmFqdn Set-RDSessionCollectionConfiguration -CollectionName "`$collectionName" -EnableUserProfileDisk -MaxUserProfileDiskSizeGB "20" -DiskPath \\$rdsGatewayVmName\RDPFileShares -ConnectionBroker $rdsGatewayVmFqdn +`$collectionName = "Review" +Write-Host -ForegroundColor Cyan "Creating '`$collectionName' collection..." 
+New-RDSessionCollection -CollectionName "`$collectionName" -SessionHost $rdsSh3VmFqdn -ConnectionBroker $rdsGatewayVmFqdn +Set-RDSessionCollectionConfiguration -CollectionName "`$collectionName" -UserGroup "$shmNetbiosName\SG $sreNetbiosName Reviewers" -ClientPrinterRedirected `$false -ClientDeviceRedirectionOptions None -DisconnectedSessionLimitMin 5 -IdleSessionLimitMin 720 -ConnectionBroker $rdsGatewayVmFqdn +Set-RDSessionCollectionConfiguration -CollectionName "`$collectionName" -EnableUserProfileDisk -MaxUserProfileDiskSizeGB "20" -DiskPath \\$rdsGatewayVmName\ReviewFileShares -ConnectionBroker $rdsGatewayVmFqdn + # Create applications # ------------------- @@ -75,6 +81,7 @@ New-RDRemoteApp -Alias "putty (2)" -DisplayName "DSVM Other (SSH)" -FilePath "C: New-RDRemoteApp -Alias WinSCP -DisplayName "File Transfer" -FilePath "C:\Program Files (x86)\WinSCP\WinSCP.exe" -ShowInWebAccess 1 -CollectionName "Remote Applications" -ConnectionBroker $rdsGatewayVmFqdn New-RDRemoteApp -Alias "chrome (1)" -DisplayName "GitLab" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://$dataSubnetIpPrefix.151" -CollectionName "Remote Applications" -ConnectionBroker $rdsGatewayVmFqdn New-RDRemoteApp -Alias "chrome (2)" -DisplayName "HackMD" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://$dataSubnetIpPrefix.152:3000" -CollectionName "Remote Applications" -ConnectionBroker $rdsGatewayVmFqdn +New-RDRemoteApp -Alias "chrome (3)" -DisplayName "GitLab Review" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://$airlockSubnetIpPrefix.151" -CollectionName "Review" -ConnectionBroker $rdsGatewayVmFqdn # Update server configuration diff --git 
a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 index 93b67c3866..c67c92d89a 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 @@ -107,13 +107,15 @@ $containerNameGateway = "sre-rds-gateway-scripts" $containerNameSessionHosts = "sre-rds-sh-packages" $vmNamePairs = @(("RDS Gateway", $config.sre.rds.gateway.vmName), ("RDS Session Host (App server)", $config.sre.rds.sessionHost1.vmName), - ("RDS Session Host (Remote desktop server)", $config.sre.rds.sessionHost2.vmName)) + ("RDS Session Host (Remote desktop server)", $config.sre.rds.sessionHost2.vmName), + ("RDS Session Host (Review server)", $config.sre.rds.sessionHost3.vmName)) # Set variables used in template expansion, retrieving from the key vault where appropriate # ----------------------------------------------------------------------------------------- Add-LogMessage -Level Info "Creating/retrieving secrets from key vault '$($config.sre.keyVault.name)'..." 
$dataSubnetIpPrefix = $config.sre.network.subnets.data.prefix
+$airlockSubnetIpPrefix = $config.sre.network.subnets.airlock.prefix
 $npsSecret = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.Name -SecretName $config.sre.keyVault.secretNames.npsSecret -DefaultLength 12
 $rdsGatewayVmFqdn = $config.sre.rds.gateway.fqdn
 $rdsGatewayVmName = $config.sre.rds.gateway.vmName
@@ -121,6 +123,8 @@ $rdsSh1VmFqdn = $config.sre.rds.sessionHost1.fqdn
 $rdsSh1VmName = $config.sre.rds.sessionHost1.vmName
 $rdsSh2VmFqdn = $config.sre.rds.sessionHost2.fqdn
 $rdsSh2VmName = $config.sre.rds.sessionHost2.vmName
+$rdsSh3VmFqdn = $config.sre.rds.sessionHost3.fqdn
+$rdsSh3VmName = $config.sre.rds.sessionHost3.vmName
 $shmDcAdminPassword = Resolve-KeyVaultSecret -VaultName $config.shm.keyVault.name -SecretName $config.shm.keyVault.secretNames.domainAdminPassword
 $shmDcAdminUsername = Resolve-KeyVaultSecret -VaultName $config.shm.keyVault.name -SecretName $config.shm.keyVault.secretNames.vmAdminUsername -DefaultValue "shm$($config.shm.id)admin".ToLower()
 $shmNetbiosName = $config.shm.domain.netbiosName
@@ -202,6 +206,9 @@ $params = @{
     RDS_Session_Host_Desktop_IP_Address = $config.sre.rds.sessionHost2.ip
     RDS_Session_Host_Desktop_Name = $config.sre.rds.sessionHost2.vmName
     RDS_Session_Host_Desktop_VM_Size = $config.sre.rds.sessionHost2.vmSize
+    RDS_Session_Host_Review_IP_Address = $config.sre.rds.sessionHost3.ip
+    RDS_Session_Host_Review_Name = $config.sre.rds.sessionHost3.vmName
+    RDS_Session_Host_Review_VM_Size = $config.sre.rds.sessionHost3.vmSize
     SRE_ID = $config.sre.Id
     Virtual_Network_Name = $config.sre.network.vnet.Name
     Virtual_Network_Resource_Group = $config.sre.network.vnet.rg
@@ -322,6 +329,7 @@ $params = @{
     gatewayHostname = "`"$($config.sre.rds.gateway.hostname)`""
     sh1Hostname = "`"$($config.sre.rds.sessionHost1.hostname)`""
     sh2Hostname = "`"$($config.sre.rds.sessionHost2.hostname)`""
+    sh3Hostname = "`"$($config.sre.rds.sessionHost3.hostname)`""
 }
 $result = Invoke-RemoteScript 
-Shell "PowerShell" -ScriptPath $scriptPath -VMName $config.shm.dc.vmName -ResourceGroupName $config.shm.dc.rg -Parameter $params Write-Output $result.Value @@ -350,6 +358,7 @@ foreach ($blob in Get-AzStorageBlob -Container $containerNameSessionHosts -Conte if (($blob.Name -like "*GoogleChrome_x64.msi") -or ($blob.Name -like "*PuTTY_x64.msi") -or ($blob.Name -like "*WinSCP_x32.exe")) { $_ = $filePathsSh1.Add($blob.Name) $_ = $filePathsSh2.Add($blob.Name) + $_ = $filePathsSh3.Add($blob.Name) } elseif ($blob.Name -like "*LibreOffice_x64.msi") { $_ = $filePathsSh2.Add($blob.Name) } @@ -405,6 +414,19 @@ $params = @{ $result = Invoke-RemoteScript -Shell "PowerShell" -ScriptPath $scriptPath -VMName $config.sre.rds.sessionHost2.vmName -ResourceGroupName $config.sre.rds.rg -Parameter $params Write-Output $result.Value +# Copy software and/or scripts to RDS SH3 (Review server) +Add-LogMessage -Level Info "[ ] Copying $($filePathsSh3.Count) files to RDS Session Host (Review server)" +$params = @{ + storageAccountName = "`"$($sreStorageAccount.StorageAccountName)`"" + storageService = "blob" + shareOrContainerName = "`"$containerNameSessionHosts`"" + sasToken = "`"$sasToken`"" + pipeSeparatedremoteFilePaths = "`"$($filePathsSh3 -join "|")`"" + downloadDir = "$remoteUploadDir" +} +$result = Invoke-RemoteScript -Shell "PowerShell" -ScriptPath $scriptPath -VMName $config.sre.rds.sessionHost3.vmName -ResourceGroupName $config.sre.rds.rg -Parameter $params +Write-Output $result.Value + # Install packages on RDS VMs # --------------------------- @@ -434,6 +456,7 @@ Write-Output $result.Value Add-VmToNSG -VMName $config.sre.rds.gateway.vmName -NSGName $config.sre.rds.gateway.nsg Add-VmToNSG -VMName $config.sre.rds.sessionHost1.vmName -NSGName $config.sre.rds.sessionHost1.nsg Add-VmToNSG -VMName $config.sre.rds.sessionHost2.vmName -NSGName $config.sre.rds.sessionHost2.nsg +Add-VmToNSG -VMName $config.sre.rds.sessionHost3.vmName -NSGName $config.sre.rds.sessionHost3.nsg # Reboot all 
the RDS VMs From 3f046793eaea48162fa82e8a62865e75d1b6c557 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Wed, 29 Apr 2020 14:54:33 +0100 Subject: [PATCH 007/155] Add SSH key and git setup on gitlab external --- .../cloud-init-gitlab-external.template.yaml | 38 ++++++++++++++++++- .../setup/Setup_SRE_WebApp_Servers.ps1 | 27 ++++++++----- 2 files changed, 53 insertions(+), 12 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index eb22e2a12e..ccf10a69e9 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -3,12 +3,46 @@ package_update: true package_upgrade: true # Install LDAP tools for debugging LDAP issues +# !!!TODO openssh-server for access during development only!!!! packages: - git + - openssh-client + - openssh-server + +write_files: + - path: "/home//.secrets/gitlab-internal-api-token" + permissions: "0600" + content: | + + - path: "/home//.secrets/gitlab-internal-ip-address" + permissions: "0600" + content: | + runcmd: - # Configure server - - echo "Configuring server" + # Change ownership of secrets to + - | + chown : "/home//.secrets/gitlab-internal-api-token"; + chown : "/home//.secrets/gitlab-internal-ip-address"; + # Create SSH key for gitlab internal access, add gitlab internal to known hosts + - | + echo "Configuring SSH for gitlab internal"; + mkdir -p /home//.ssh; + ssh-keygen -t ed25519 -C 'gitlab-internal' -N '' -f /home//.ssh/id_ed25519; + chown : "/home//.ssh/id_ed25519" + chown : "/home//.ssh/id_ed25519.pub" + key=$(cat /home//.ssh/id_ed25519.pub); + curl --header 'Authorization: Bearer ' --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"external-ingress\"}" /api/v4/user/keys; + ssh-keyscan -H >> 
/home//.ssh/known_hosts; + chown : "/home//.ssh/known_hosts" + # Configure global git user to be gitlab internal user + - | + echo "Configuring git global user"; + HOME=/home/ git config --global user.name ''; + HOME=/home/ git config --global user.email '@'; + # Give ownership of their home directory + - | + chown -R : "/home/"; # Shutdown so that we can tell when the job has finished by polling the VM state power_state: diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index c99091396e..2b9f0b78c4 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -45,13 +45,14 @@ Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgGitlabInternal ` $nsgGitlabExternal = Deploy-NetworkSecurityGroup -Name $config.sre.network.nsg.airlock.name -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgGitlabExternal ` - -Name "InboundDenyAll" ` - -Description "Inbound deny everything" ` - -Priority 4000 ` - -Direction Inbound -Access Deny -Protocol * ` - -SourceAddressPrefix * -SourcePortRange * ` - -DestinationAddressPrefix * -DestinationPortRange * +# TODO Removed for development testing +# Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgGitlabExternal ` +# -Name "InboundDenyAll" ` +# -Description "Inbound deny everything" ` +# -Priority 4000 ` +# -Direction Inbound -Access Deny -Protocol * ` +# -SourceAddressPrefix * -SourcePortRange * ` +# -DestinationAddressPrefix * -DestinationPortRange * # Check that VNET and subnet exist @@ -63,8 +64,8 @@ $subnet = Deploy-Subnet -Name $config.sre.network.subnets.airlock.Name -VirtualN Set-SubnetNetworkSecurityGroup -Subnet $subnet -NetworkSecurityGroup $nsgGitlabExternal -VirtualNetwork $vnet -# Expand GitLab cloudinit -# 
----------------------- +# Expand GitLab internal cloudinit +# -------------------------------- $shmDcFqdn = ($config.shm.dc.hostname + "." + $config.shm.domain.fqdn) $gitlabFqdn = $config.sre.webapps.gitlab.internal.hostname + "." + $config.sre.domain.fqdn $gitlabLdapUserDn = "CN=" + $config.sre.users.ldap.gitlab.name + "," + $config.shm.domain.serviceOuPath @@ -184,7 +185,13 @@ $vmNic = Deploy-VirtualMachineNIC -Name "$vmName-NIC" -ResourceGroupName $config # Deploy the GitLab external VM # ------------------------------ $bootDiagnosticsAccount = Deploy-StorageAccount -Name $config.sre.storage.bootdiagnostics.accountName -ResourceGroupName $config.sre.storage.bootdiagnostics.rg -Location $config.sre.location + $gitlabExternalCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab-external.template.yaml" | Get-Item | Get-Content -Raw +$gitlabExternalCloudInit = $gitlabExternalCloudInitTemplate.Replace('',$config.sre.webapps.gitlab.internal.ip). + Replace('',$sreAdminUsername). + Replace('',$config.shm.domain.fqdn). + Replace('',$gitlabExternalUsername). 
+ Replace('',$gitlabExternalAPIToken) $params = @{ Name = $vmName @@ -192,7 +199,7 @@ $params = @{ AdminPassword = $sreAdminPassword AdminUsername = $sreAdminUsername BootDiagnosticsAccount = $bootDiagnosticsAccount - CloudInitYaml = $gitlabExternalCloudInitTemplate + CloudInitYaml = $gitlabExternalCloudInit location = $config.sre.location NicId = $vmNic.Id OsDiskType = "Standard_LRS" From ff4881015dfc60db40fde35b3bb7a573edcf244b Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Wed, 29 Apr 2020 18:02:47 +0100 Subject: [PATCH 008/155] WIP Script to update GitLab projects from whitelist --- .../cloud-init-gitlab-external.template.yaml | 59 +++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index ccf10a69e9..57e5628b27 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -18,6 +18,65 @@ write_files: permissions: "0600" content: | + - path: "/home//update_from_whitelist.py" + permissions: "0755" + content: | + import os + import json + import requests + import subprocess + from pathlib import Path + + home = str(Path.home()) + + with open("../whitelist", "r") as f: + whitelist = [line.strip().split(" ") for line in f.readlines()] + + whitelist = [{"url": repo[0], + "commit_sha": repo[1], + "gitlab_name": repo[2], + "gitlab_branch": repo[3]} for repo in whitelist] + + with open(f"{home}/.secrets/gitlab-internal-ip-address","r") as f: + gitlab_internal_ip = f.readlines()[0].strip() + + with open(f"{home}/.secrets/gitlab-internal-api-token","r") as f: + gitlab_token = f.readlines()[0].strip() + + gitlab_internal_url = "http://" + gitlab_internal_ip + "/api/v4/projects" + + gitlab_internal_projects = 
requests.get(gitlab_internal_url, + headers = {"Authorization": "Bearer " + gitlab_token}, + params = {"owned": True, "simple": True}) + + gitlab_internal_repo_names = [repo["name"].lower() for repo in gitlab_internal_projects.json()] + for repo in whitelist: + repo_name = repo["gitlab_name"] + repo_path = os.path.join(repo_name) + if not os.path.exists(repo_path): + os.system("git clone " + repo["url"] + " " + repo["gitlab_name"]) + + os.chdir(repo["gitlab_name"]) + os.system("git fetch") + os.system("git checkout " + repo["commit_sha"]) + os.system("git branch -f " + repo["gitlab_branch"]) + os.system("git checkout " + repo["gitlab_branch"]) + + if repo_name.lower() not in gitlab_internal_repo_names: + response = requests.post(gitlab_internal_url, + headers = {"Authorization": "Bearer " + gitlab_token}, + data = {"name": repo_name, "visibility": "public"}) + + gitlab_internal_remote_url = response.json()["ssh_url_to_repo"] + assert(response.json()["name"] == repo_name) + + print("Adding remote gitlab-internal as " + gitlab_internal_remote_url) + os.system("git remote add gitlab-internal " + gitlab_internal_remote_url) + + + os.system("git push gitlab-internal " + repo["gitlab_branch"]) + + os.chdir("..") runcmd: # Change ownership of secrets to From 8507b6364ad53a4d214651eb51b5131d5613f0b4 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Tue, 28 Apr 2020 11:35:56 +0100 Subject: [PATCH 009/155] Updated review session server settings --- deployment/common/Configuration.psm1 | 5 + ...e_New_SRE_User_Service_Accounts_Remote.ps1 | 22 ++- .../Deploy_RDS_Environment.template.ps1 | 130 +++++++++--------- .../setup/Add_SRE_Data_To_SHM.ps1 | 2 + .../setup/Setup_SRE_VNET_RDS.ps1 | 31 ++++- .../full/sre_mortestsandbox_full_config.json | 9 ++ 6 files changed, 122 insertions(+), 77 deletions(-) diff --git a/deployment/common/Configuration.psm1 b/deployment/common/Configuration.psm1 index 2e925977da..e60a10998b 100644 --- a/deployment/common/Configuration.psm1 +++ 
b/deployment/common/Configuration.psm1
@@ -300,6 +300,7 @@ function Add-SreConfig {
     $serverAdminsGroup = "SG $($config.sre.domain.netbiosName) Server Administrators"
     $sqlAdminsGroup = "SG $($config.sre.domain.netbiosName) SQL Server Administrators"
     $researchUsersGroup = "SG $($config.sre.domain.netbiosName) Research Users"
+    $reviewUsers = "SG $($config.sre.domain.netbiosName) Review Users"
     $config.sre.domain.securityGroups = [ordered]@{
         serverAdmins = [ordered]@{
             name = $serverAdminsGroup
@@ -313,6 +314,10 @@ function Add-SreConfig {
             name = $researchUsersGroup
             description = $researchUsersGroup
         }
+        reviewUsers = [ordered]@{
+            name = $reviewUsers
+            description = $reviewUsers
+        }
     }

     # --- Network config ---
diff --git a/deployment/secure_research_environment/remote/configure_shm_dc/scripts/Create_New_SRE_User_Service_Accounts_Remote.ps1 b/deployment/secure_research_environment/remote/configure_shm_dc/scripts/Create_New_SRE_User_Service_Accounts_Remote.ps1
index f6e2a84299..91e181ab59 100644
--- a/deployment/secure_research_environment/remote/configure_shm_dc/scripts/Create_New_SRE_User_Service_Accounts_Remote.ps1
+++ b/deployment/secure_research_environment/remote/configure_shm_dc/scripts/Create_New_SRE_User_Service_Accounts_Remote.ps1
@@ -9,6 +9,8 @@ param(
     [String]$shmFqdn,
     [String]$researchUserSgName,
     [String]$researchUserSgDescription,
+    [String]$reviewUserSgName,
+    [String]$reviewUserSgDescription,
     [String]$sqlAdminSgName,
     [String]$sqlAdminSgDescription,
     [String]$ldapUserSgName,
@@ -80,6 +82,7 @@ $testResearcherPasswordSecureString = ConvertTo-SecureString -String $testResear

 # Create SRE Security Groups
 New-SreGroup -name $researchUserSgName -description $researchUserSgDescription -Path $securityOuPath -GroupScope Global -GroupCategory Security
+New-SreGroup -name $reviewUserSgName -description $reviewUserSgDescription -Path $securityOuPath -GroupScope Global -GroupCategory Security
 New-SreGroup -name $sqlAdminSgName -description $sqlAdminSgDescription -Path 
$securityOuPath -GroupScope Global -GroupCategory Security # Create Service Accounts for SRE @@ -89,10 +92,15 @@ New-SreUser -samAccountName $dsvmSamAccountName -name $dsvmName -path $serviceOu New-SreUser -samAccountName $dataMountSamAccountName -name $dataMountName -path $serviceOuPath -passwordSecureString $dataMountPasswordSecureString New-SreUser -samAccountName $testResearcherSamAccountName -name $testResearcherName -path $researchUserOuPath -passwordSecureString $testResearcherPasswordSecureString -# Add Data Science LDAP users to SG Data Science LDAP Users security group -Write-Output " [ ] Adding '$dsvmSamAccountName' user to group '$ldapUserSgName'" -Add-ADGroupMember "$ldapUserSgName" "$dsvmSamAccountName" - -# Add SRE test users to the relative Security Groups -Write-Output " [ ] Adding '$testResearcherSamAccountName' user to group '$researchUserSgName'" -Add-ADGroupMember "$researchUserSgName" "$testResearcherSamAccountName" +# Add users to the relevant security groups +foreach ($userGroupPair in @(($dsvmSamAccountName, $ldapUserSgName), + ($testResearcherSamAccountName, $researchUserSgName))) { + $samAccountName, $sgName = $userGroupPair + Write-Output " [ ] Ensuring that '$samAccountName' user belongs to group '$sgName'" + Add-ADGroupMember "$sgName" "$samAccountName" + if ($?) { + Write-Output " [o] Succeeded" + } else { + Write-Output " [x] Failed to add '$samAccountName' to group '$sgName'!" 
+ } +} diff --git a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 index 78c9ff2138..4c71ccdb52 100644 --- a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 +++ b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 @@ -3,121 +3,115 @@ Import-Module RemoteDesktop # Initialise the data drives # -------------------------- -Write-Host -ForegroundColor Cyan "Initialising data drives..." +Write-Output "Initialising data drives..." Stop-Service ShellHWDetection -`$CandidateRawDisks = Get-Disk | Where {`$_.PartitionStyle -eq 'raw'} | Sort -Property Number -Foreach (`$RawDisk in `$CandidateRawDisks) { - `$LUN = (Get-WmiObject Win32_DiskDrive | Where index -eq `$RawDisk.Number | Select SCSILogicalUnit -ExpandProperty SCSILogicalUnit) - `$Disk = Initialize-Disk -PartitionStyle GPT -Number `$RawDisk.Number - `$Partition = New-Partition -DiskNumber `$RawDisk.Number -UseMaximumSize -AssignDriveLetter - `$Volume = Format-Volume -Partition `$Partition -FileSystem NTFS -NewFileSystemLabel "DATA-`$LUN" -Confirm:`$false +$CandidateRawDisks = Get-Disk | Where-Object {$_.PartitionStyle -eq 'raw'} | Sort -Property Number +foreach ($RawDisk in $CandidateRawDisks) { + $LUN = (Get-WmiObject Win32_DiskDrive | Where-Object index -eq $RawDisk.Number | Select-Object SCSILogicalUnit -ExpandProperty SCSILogicalUnit) + $_ = Initialize-Disk -PartitionStyle GPT -Number $RawDisk.Number + $Partition = New-Partition -DiskNumber $RawDisk.Number -UseMaximumSize -AssignDriveLetter + $_ = Format-Volume -Partition $Partition -FileSystem NTFS -NewFileSystemLabel "DATA-$LUN" -Confirm:$false } Start-Service ShellHWDetection # Setup user profile disk shares # ------------------------------ -Write-Host -ForegroundColor Cyan "Creating user profile 
disk shares..." -ForEach (`$sharePath in ("F:\AppFileShares", "G:\RDPFileShares", "H:\ReviewFileShares")) { - `$_ = New-Item -ItemType Directory -Force -Path `$sharePath - if(`$(Get-SmbShare | Where-Object -Property Path -eq `$sharePath) -eq `$null) { - New-SmbShare -Path `$sharePath -Name `$sharePath.Split("\")[1] -FullAccess "$shmNetbiosName\$rdsGatewayVmName$","$shmNetbiosName\$rdsSh1VmName$","$shmNetbiosName\$rdsSh2VmName$","$shmNetbiosName\$rdsSh3VmName$","$shmNetbiosName\Domain Admins" +Write-Output "Creating user profile disk shares..." +foreach ($sharePath in ("F:\AppFileShares", "G:\RDPFileShares", "H:\ReviewFileShares")) { + $_ = New-Item -ItemType Directory -Force -Path $sharePath + if($null -eq $(Get-SmbShare | Where-Object -Property Path -eq $sharePath)) { + New-SmbShare -Path $sharePath -Name $sharePath.Split("\")[1] -FullAccess "\$","\$","\$","\$","\Domain Admins" } } # Remove any old RDS settings # --------------------------- -ForEach (`$collection in `$(Get-RDSessionCollection -ErrorAction SilentlyContinue)) { - Write-Host -ForegroundColor Cyan "Removing existing RDSessionCollection: '`$collection.CollectionName' (and associated apps)" - Remove-RDSessionCollection -CollectionName `$collection.CollectionName -Force -ErrorAction SilentlyContinue +foreach ($collection in $(Get-RDSessionCollection -ErrorAction SilentlyContinue)) { + Write-Output "Removing existing RDSessionCollection: '$collection.CollectionName' (and associated apps)" + Remove-RDSessionCollection -CollectionName $collection.CollectionName -Force -ErrorAction SilentlyContinue } -ForEach (`$server in `$(Get-RDServer -ErrorAction SilentlyContinue)) { - Write-Host -ForegroundColor Cyan "Removing existing RDServer: '`$(`$server.Server)'" - ForEach (`$role in `$server.Roles) { - Remove-RDServer -Server `$server.Server -Role `$role -Force -ErrorAction SilentlyContinue +foreach ($server in $(Get-RDServer -ErrorAction SilentlyContinue)) { + Write-Output "Removing existing RDServer: 
'$($server.Server)'" + foreach ($role in $server.Roles) { + Remove-RDServer -Server $server.Server -Role $role -Force -ErrorAction SilentlyContinue } } # Create RDS Environment # ---------------------- -Write-Host -ForegroundColor Cyan "Creating RDS Environment..." -New-RDSessionDeployment -ConnectionBroker "$rdsGatewayVmFqdn" -WebAccessServer "$rdsGatewayVmFqdn" -SessionHost @("$rdsSh1VmFqdn", "$rdsSh2VmFqdn") -Add-RDServer -Server $rdsGatewayVmFqdn -Role RDS-LICENSING -ConnectionBroker $rdsGatewayVmFqdn -Set-RDLicenseConfiguration -LicenseServer $rdsGatewayVmFqdn -Mode PerUser -ConnectionBroker $rdsGatewayVmFqdn -Force +Write-Output "Creating RDS Environment..." +New-RDSessionDeployment -ConnectionBroker "" -WebAccessServer "" -SessionHost @("", "", "") +Add-RDServer -Server -Role RDS-LICENSING -ConnectionBroker +Set-RDLicenseConfiguration -LicenseServer -Mode PerUser -ConnectionBroker -Force Add-WindowsFeature -Name RDS-Gateway -IncludeAllSubFeature -Add-RDServer -Server $rdsGatewayVmFqdn -Role RDS-GATEWAY -ConnectionBroker $rdsGatewayVmFqdn -GatewayExternalFqdn $sreFqdn +Add-RDServer -Server -Role RDS-GATEWAY -ConnectionBroker -GatewayExternalFqdn $sreFqdn # Create collections # ------------------ -`$collectionName = "Remote Applications" -Write-Host -ForegroundColor Cyan "Creating '`$collectionName' collection..." 
-New-RDSessionCollection -CollectionName "`$collectionName" -SessionHost $rdsSh1VmFqdn -ConnectionBroker $rdsGatewayVmFqdn -Set-RDSessionCollectionConfiguration -CollectionName "`$collectionName" -UserGroup "$shmNetbiosName\SG $sreNetbiosName Research Users" -ClientPrinterRedirected `$false -ClientDeviceRedirectionOptions None -DisconnectedSessionLimitMin 5 -IdleSessionLimitMin 720 -ConnectionBroker $rdsGatewayVmFqdn -Set-RDSessionCollectionConfiguration -CollectionName "`$collectionName" -EnableUserProfileDisk -MaxUserProfileDiskSizeGB "20" -DiskPath \\$rdsGatewayVmName\AppFileShares -ConnectionBroker $rdsGatewayVmFqdn +$collectionName = "Remote Applications" +Write-Output "Creating '$collectionName' collection..." +New-RDSessionCollection -CollectionName "$collectionName" -SessionHost -ConnectionBroker +Set-RDSessionCollectionConfiguration -CollectionName "$collectionName" -UserGroup "\" -ClientPrinterRedirected $false -ClientDeviceRedirectionOptions None -DisconnectedSessionLimitMin 5 -IdleSessionLimitMin 720 -ConnectionBroker +Set-RDSessionCollectionConfiguration -CollectionName "$collectionName" -EnableUserProfileDisk -MaxUserProfileDiskSizeGB "20" -DiskPath \\\AppFileShares -ConnectionBroker -`$collectionName = "Presentation Server" -Write-Host -ForegroundColor Cyan "Creating '`$collectionName' collection..." 
-New-RDSessionCollection -CollectionName "`$collectionName" -SessionHost $rdsSh2VmFqdn -ConnectionBroker $rdsGatewayVmFqdn -Set-RDSessionCollectionConfiguration -CollectionName "`$collectionName" -UserGroup "$shmNetbiosName\SG $sreNetbiosName Research Users" -ClientPrinterRedirected `$false -ClientDeviceRedirectionOptions None -DisconnectedSessionLimitMin 5 -IdleSessionLimitMin 720 -ConnectionBroker $rdsGatewayVmFqdn -Set-RDSessionCollectionConfiguration -CollectionName "`$collectionName" -EnableUserProfileDisk -MaxUserProfileDiskSizeGB "20" -DiskPath \\$rdsGatewayVmName\RDPFileShares -ConnectionBroker $rdsGatewayVmFqdn +$collectionName = "Presentation Server" +Write-Output "Creating '$collectionName' collection..." +New-RDSessionCollection -CollectionName "$collectionName" -SessionHost -ConnectionBroker +Set-RDSessionCollectionConfiguration -CollectionName "$collectionName" -UserGroup "\" -ClientPrinterRedirected $false -ClientDeviceRedirectionOptions None -DisconnectedSessionLimitMin 5 -IdleSessionLimitMin 720 -ConnectionBroker +Set-RDSessionCollectionConfiguration -CollectionName "$collectionName" -EnableUserProfileDisk -MaxUserProfileDiskSizeGB "20" -DiskPath \\\RDPFileShares -ConnectionBroker -`$collectionName = "Review" -Write-Host -ForegroundColor Cyan "Creating '`$collectionName' collection..." 
-New-RDSessionCollection -CollectionName "`$collectionName" -SessionHost $rdsSh3VmFqdn -ConnectionBroker $rdsGatewayVmFqdn -Set-RDSessionCollectionConfiguration -CollectionName "`$collectionName" -UserGroup "$shmNetbiosName\SG $sreNetbiosName Reviewers" -ClientPrinterRedirected `$false -ClientDeviceRedirectionOptions None -DisconnectedSessionLimitMin 5 -IdleSessionLimitMin 720 -ConnectionBroker $rdsGatewayVmFqdn -Set-RDSessionCollectionConfiguration -CollectionName "`$collectionName" -EnableUserProfileDisk -MaxUserProfileDiskSizeGB "20" -DiskPath \\$rdsGatewayVmName\ReviewFileShares -ConnectionBroker $rdsGatewayVmFqdn +$collectionName = "Review" +Write-Output "Creating '$collectionName' collection..." +New-RDSessionCollection -CollectionName "$collectionName" -SessionHost -ConnectionBroker +Set-RDSessionCollectionConfiguration -CollectionName "$collectionName" -UserGroup "\" -ClientPrinterRedirected $false -ClientDeviceRedirectionOptions None -DisconnectedSessionLimitMin 5 -IdleSessionLimitMin 720 -ConnectionBroker +Set-RDSessionCollectionConfiguration -CollectionName "$collectionName" -EnableUserProfileDisk -MaxUserProfileDiskSizeGB "20" -DiskPath \\\ReviewFileShares -ConnectionBroker # Create applications # ------------------- -Write-Host -ForegroundColor Cyan "Creating applications..." 
-New-RDRemoteApp -Alias "mstsc (1)" -DisplayName "DSVM Main (Desktop)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "-v $dataSubnetIpPrefix.160" -CollectionName "Remote Applications" -ConnectionBroker $rdsGatewayVmFqdn -New-RDRemoteApp -Alias "putty (1)" -DisplayName "DSVM Main (SSH)" -FilePath "C:\Program Files\PuTTY\putty.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "-ssh $dataSubnetIpPrefix.160" -CollectionName "Remote Applications" -ConnectionBroker $rdsGatewayVmFqdn -New-RDRemoteApp -Alias "mstsc (2)" -DisplayName "DSVM Other (Desktop)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CollectionName "Remote Applications" -ConnectionBroker $rdsGatewayVmFqdn -New-RDRemoteApp -Alias "putty (2)" -DisplayName "DSVM Other (SSH)" -FilePath "C:\Program Files\PuTTY\putty.exe" -ShowInWebAccess 1 -CollectionName "Remote Applications" -ConnectionBroker $rdsGatewayVmFqdn -New-RDRemoteApp -Alias WinSCP -DisplayName "File Transfer" -FilePath "C:\Program Files (x86)\WinSCP\WinSCP.exe" -ShowInWebAccess 1 -CollectionName "Remote Applications" -ConnectionBroker $rdsGatewayVmFqdn -New-RDRemoteApp -Alias "chrome (1)" -DisplayName "GitLab" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://$dataSubnetIpPrefix.151" -CollectionName "Remote Applications" -ConnectionBroker $rdsGatewayVmFqdn -New-RDRemoteApp -Alias "chrome (2)" -DisplayName "HackMD" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://$dataSubnetIpPrefix.152:3000" -CollectionName "Remote Applications" -ConnectionBroker $rdsGatewayVmFqdn -New-RDRemoteApp -Alias "chrome (3)" -DisplayName "GitLab Review" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require 
-RequiredCommandLine "http://$airlockSubnetIpPrefix.151" -CollectionName "Review" -ConnectionBroker $rdsGatewayVmFqdn +Write-Output "Creating applications..." +New-RDRemoteApp -Alias "mstsc (1)" -DisplayName "DSVM Main (Desktop)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "-v .160" -CollectionName "Remote Applications" -ConnectionBroker +New-RDRemoteApp -Alias "putty (1)" -DisplayName "DSVM Main (SSH)" -FilePath "C:\Program Files\PuTTY\putty.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "-ssh .160" -CollectionName "Remote Applications" -ConnectionBroker +New-RDRemoteApp -Alias "mstsc (2)" -DisplayName "DSVM Other (Desktop)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CollectionName "Remote Applications" -ConnectionBroker +New-RDRemoteApp -Alias "putty (2)" -DisplayName "DSVM Other (SSH)" -FilePath "C:\Program Files\PuTTY\putty.exe" -ShowInWebAccess 1 -CollectionName "Remote Applications" -ConnectionBroker +New-RDRemoteApp -Alias WinSCP -DisplayName "File Transfer" -FilePath "C:\Program Files (x86)\WinSCP\WinSCP.exe" -ShowInWebAccess 1 -CollectionName "Remote Applications" -ConnectionBroker +New-RDRemoteApp -Alias "chrome (1)" -DisplayName "GitLab" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151" -CollectionName "Remote Applications" -ConnectionBroker +New-RDRemoteApp -Alias "chrome (2)" -DisplayName "HackMD" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.152:3000" -CollectionName "Remote Applications" -ConnectionBroker +New-RDRemoteApp -Alias "chrome (3)" -DisplayName "GitLab Review" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151" -CollectionName 
"Review" -ConnectionBroker # Update server configuration # --------------------------- -Write-Host -ForegroundColor Cyan "Updating server configuration..." -`$targetDirectoryLocal = "C:\Users\$shmDcAdminUsername\AppData\Roaming\Microsoft\Windows\ServerManager" -`$targetDirectoryDomain = "C:\Users\$shmDcAdminUsername.$shmNetbiosName\AppData\Roaming\Microsoft\Windows\ServerManager" -`$_ = New-Item -ItemType Directory -Force -Path `$targetDirectoryLocal -`$_ = New-Item -ItemType Directory -Force -Path `$targetDirectoryDomain +Write-Output "Updating server configuration..." Get-Process ServerManager -ErrorAction SilentlyContinue | Stop-Process -Force -Copy-Item -Path "$remoteUploadDir\ServerList.xml" -Destination "`$targetDirectoryLocal\ServerList.xml" -Force -Copy-Item -Path "$remoteUploadDir\ServerList.xml" -Destination "`$targetDirectoryDomain\ServerList.xml" -Force -Start-Process -FilePath `$env:SystemRoot\System32\ServerManager.exe -WindowStyle Maximized -if (`$?) { - Write-Host " [o] Server configuration update succeeded" +foreach ($targetDirectory in @("C:\Users\\AppData\Roaming\Microsoft\Windows\ServerManager", + "C:\Users\.\AppData\Roaming\Microsoft\Windows\ServerManager")) { + $_ = New-Item -ItemType Directory -Force -Path $targetDirectory + Copy-Item -Path "\ServerList.xml" -Destination "$targetDirectory\ServerList.xml" -Force +} +Start-Process -FilePath $env:SystemRoot\System32\ServerManager.exe -WindowStyle Maximized +if ($?) { + Write-Output " [o] Server configuration update succeeded" } else { - Write-Host " [x] Server configuration update failed!" + Write-Output " [x] Server configuration update failed!" } # Install RDS webclient # --------------------- -Write-Host "Installing RDS webclient..." +Write-Output "Installing RDS webclient..." Install-RDWebClientPackage -if (`$?) { - Write-Host " [o] RDS webclient installation succeeded" +if ($?) 
{ + Write-Output " [o] RDS webclient installation succeeded" } else { - Write-Host " [x] RDS webclient installation failed!" + Write-Output " [x] RDS webclient installation failed!" } -# # Update where the remote desktop is hosted -# # ----------------------------------------- -# Invoke-Expression -Command "$remoteUploadDir\Set-RDPublishedName.ps1 -ClientAccessName `$sreFqdn" - - # Remove the requirement for the /RDWeb/webclient/ suffix by setting up a redirect in IIS # --------------------------------------------------------------------------------------- Set-WebConfiguration system.webServer/httpRedirect "IIS:\sites\Default Web Site" -Value @{enabled="true";destination="/RDWeb/webclient/";httpResponseStatus="Permanent"} diff --git a/deployment/secure_research_environment/setup/Add_SRE_Data_To_SHM.ps1 b/deployment/secure_research_environment/setup/Add_SRE_Data_To_SHM.ps1 index c1b1649b14..f10036e939 100644 --- a/deployment/secure_research_environment/setup/Add_SRE_Data_To_SHM.ps1 +++ b/deployment/secure_research_environment/setup/Add_SRE_Data_To_SHM.ps1 @@ -55,6 +55,8 @@ $params = @{ sreFqdn = "`"$($config.sre.domain.fqdn)`"" researchUserSgName = "`"$($config.sre.domain.securityGroups.researchUsers.name)`"" researchUserSgDescription = "`"$($config.sre.domain.securityGroups.researchUsers.description)`"" + reviewUserSgName = "`"$($config.sre.domain.securityGroups.reviewUsers.name)`"" + reviewUserSgDescription = "`"$($config.sre.domain.securityGroups.reviewUsers.description)`"" sqlAdminSgName = "`"$($config.sre.domain.securityGroups.sqlAdmins.name)`"" sqlAdminSgDescription = "`"$($config.sre.domain.securityGroups.sqlAdmins.description)`"" ldapUserSgName = "`"$($config.shm.domain.securityGroups.dsvmLdapUsers.name)`"" diff --git a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 index c67c92d89a..3e2d86532e 100644 --- 
a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 @@ -125,6 +125,8 @@ $rdsSh2VmFqdn = $config.sre.rds.sessionHost2.fqdn $rdsSh2VmName = $config.sre.rds.sessionHost2.vmName $rdsSh3VmFqdn = $config.sre.rds.sessionHost3.fqdn $rdsSh3VmName = $config.sre.rds.sessionHost3.vmName +$researchUserSgName = $config.sre.domain.securityGroups.researchUsers.name +$reviewUserSgName = $config.sre.domain.securityGroups.reviewUsers.name $shmDcAdminPassword = Resolve-KeyVaultSecret -VaultName $config.shm.keyVault.name -SecretName $config.shm.keyVault.secretNames.domainAdminPassword $shmDcAdminUsername = Resolve-KeyVaultSecret -VaultName $config.shm.keyVault.name -SecretName $config.shm.keyVault.secretNames.vmAdminUsername -DefaultValue "shm$($config.shm.id)admin".ToLower() $shmNetbiosName = $config.shm.domain.netbiosName @@ -246,8 +248,33 @@ Add-LogMessage -Level Info "Upload RDS deployment scripts to storage..." # Expand deploy script $deployScriptLocalFilePath = (New-TemporaryFile).FullName -$template = Get-Content (Join-Path $PSScriptRoot ".." "remote" "create_rds" "templates" "Deploy_RDS_Environment.template.ps1") -Raw -$ExecutionContext.InvokeCommand.ExpandString($template) | Out-File $deployScriptLocalFilePath +# $template = Get-Content (Join-Path $PSScriptRoot ".." "remote" "create_rds" "templates" "Deploy_RDS_Environment.template.ps1") -Raw +# $ExecutionContext.InvokeCommand.ExpandString($template) | Out-File $deployScriptLocalFilePath +$template = Join-Path $PSScriptRoot ".." "remote" "create_rds" "templates" "Deploy_RDS_Environment.template.ps1" | Get-Item | Get-Content -Raw +$template.Replace('', $shmNetbiosName). + Replace('', $rdsGatewayVmName). + Replace('',$rdsSh1VmName). + Replace('',$rdsSh2VmName). + Replace('',$rdsSh3VmName). + Replace('',$rdsGatewayVmFqdn). + Replace('',$rdsSh1VmFqdn). + Replace('',$rdsSh2VmFqdn). + Replace('',$rdsSh3VmFqdn). 
+ Replace('',$researchUserSgName). + Replace('',$reviewUserSgName). + Replace('',$dataSubnetIpPrefix). + Replace('',$airlockSubnetIpPrefix). + Replace('',$shmDcAdminUsername). + Replace('',$remoteUploadDir). + Replace('',$example). + Replace('',$example). + Replace('',$example). + Replace('',$example). + Replace('',$example). + Replace('',$example). + Replace('',$config.shm.domain.netbiosName) | Out-File $deployScriptLocalFilePath + + # Expand server list XML $serverListLocalFilePath = (New-TemporaryFile).FullName diff --git a/environment_configs/full/sre_mortestsandbox_full_config.json b/environment_configs/full/sre_mortestsandbox_full_config.json index 2ecf56a4a0..e71355e1e9 100644 --- a/environment_configs/full/sre_mortestsandbox_full_config.json +++ b/environment_configs/full/sre_mortestsandbox_full_config.json @@ -152,6 +152,10 @@ "researchUsers": { "name": "SG SANDBOX Research Users", "description": "SG SANDBOX Research Users" + }, + "reviewUsers": { + "name": "SG SANDBOX Research Users", + "description": "SG SANDBOX Research Users" } } }, @@ -294,6 +298,11 @@ "hostname": "DKP-SRE-SANDBOX", "fqdn": "DKP-SRE-SANDBOX.mortest.dsgroupdev.co.uk", "ip": "10.150.9.248" + }, + "sessionHost3": { + "vmName": "REV-SRE-SANDBOX", + "vmSize": "Standard_DS2_v2", + "nsg": "NSG_SRE_SANDBOX_RDS_SESSION_HOSTS" } }, "dataserver": { From 7c8d3778abf04423e113a9ef4c303f6b40418ed1 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Tue, 28 Apr 2020 13:48:36 +0100 Subject: [PATCH 010/155] Updated RDS deployment configuration --- deployment/common/Configuration.psm1 | 5 +- .../arm_templates/sre-rds-template.json | 11 +- .../scripts/Install_Signed_Ssl_Cert.ps1 | 14 +- .../Deploy_RDS_Environment.template.ps1 | 137 +++++++++------- .../templates/ServerList.template.xml | 11 +- .../setup/Setup_SRE_VNET_RDS.ps1 | 152 ++++++++---------- .../full/sre_mortestsandbox_full_config.json | 9 +- 7 files changed, 176 insertions(+), 163 deletions(-) diff --git a/deployment/common/Configuration.psm1 
b/deployment/common/Configuration.psm1 index e60a10998b..07edeeeb24 100644 --- a/deployment/common/Configuration.psm1 +++ b/deployment/common/Configuration.psm1 @@ -300,7 +300,7 @@ function Add-SreConfig { $serverAdminsGroup = "SG $($config.sre.domain.netbiosName) Server Administrators" $sqlAdminsGroup = "SG $($config.sre.domain.netbiosName) SQL Server Administrators" $researchUsersGroup = "SG $($config.sre.domain.netbiosName) Research Users" - $reviewUsers= "SG $($config.sre.domain.netbiosName) Research Users" + $reviewUsers= "SG $($config.sre.domain.netbiosName) Review Users" $config.sre.domain.securityGroups = [ordered]@{ serverAdmins = [ordered]@{ name = $serverAdminsGroup @@ -498,6 +498,9 @@ function Add-SreConfig { $config.sre.rds.sessionHost2.hostname = $config.sre.rds.sessionHost2.vmName $config.sre.rds.sessionHost2.fqdn = "$($config.sre.rds.sessionHost2.hostname).$($config.shm.domain.fqdn)" $config.sre.rds.sessionHost2.ip = "$($config.sre.network.subnets.rds.prefix).248" + $config.sre.rds.sessionHost3.hostname = $config.sre.rds.sessionHost3.vmName + $config.sre.rds.sessionHost3.fqdn = "$($config.sre.rds.sessionHost3.hostname).$($config.shm.domain.fqdn)" + $config.sre.rds.sessionHost3.ip = "$($config.sre.network.subnets.rds.prefix).247" # --- Secure servers --- diff --git a/deployment/secure_research_environment/arm_templates/sre-rds-template.json b/deployment/secure_research_environment/arm_templates/sre-rds-template.json index 8f6f54b2d3..6d61413870 100644 --- a/deployment/secure_research_environment/arm_templates/sre-rds-template.json +++ b/deployment/secure_research_environment/arm_templates/sre-rds-template.json @@ -207,7 +207,8 @@ "vnetID": "[resourceId(parameters('Virtual_Network_Resource_Group'), 'Microsoft.Network/virtualNetworks', parameters('Virtual_Network_Name'))]", "subnet": "[concat(variables('vnetID'),'/subnets/', parameters('Virtual_Network_Subnet'))]" }, - "resources": [{ + "resources": [ + { "type": "Microsoft.Compute/virtualMachines", 
"name": "[parameters('RDS_Gateway_Name')]", "apiVersion": "2018-06-01", @@ -244,7 +245,7 @@ "managedDisk": { "storageAccountType": "Standard_LRS" }, - "diskSizeGB": 511 + "diskSizeGB": 512 }, { "lun": 1, @@ -255,10 +256,10 @@ "managedDisk": { "storageAccountType": "Standard_LRS" }, - "diskSizeGB": 511 + "diskSizeGB": 512 }, { - "lun": 1, + "lun": 2, "name": "[concat(parameters('RDS_Gateway_Name'),'-DATA-DISK-3')]", "createOption": "Empty", "caching": "None", @@ -266,7 +267,7 @@ "managedDisk": { "storageAccountType": "Standard_LRS" }, - "diskSizeGB": 511 + "diskSizeGB": 512 } ] }, diff --git a/deployment/secure_research_environment/remote/create_rds/scripts/Install_Signed_Ssl_Cert.ps1 b/deployment/secure_research_environment/remote/create_rds/scripts/Install_Signed_Ssl_Cert.ps1 index 9986f55b9c..5c1c43d5b5 100644 --- a/deployment/secure_research_environment/remote/create_rds/scripts/Install_Signed_Ssl_Cert.ps1 +++ b/deployment/secure_research_environment/remote/create_rds/scripts/Install_Signed_Ssl_Cert.ps1 @@ -32,24 +32,22 @@ if ($null -ne $certificate) { # Update RDS roles to use new certificate by thumbprint # ----------------------------------------------------- Write-Host "Updating RDS roles to use new certificate..." -Set-RDCertificate -Role RDPublishing -Thumbprint $certificate.Thumbprint -ConnectionBroker $rdsFqdn -Force +Set-RDCertificate -Role RDPublishing -Thumbprint $certificate.Thumbprint -ConnectionBroker $rdsFqdn -ErrorAction Stop -Force $success = $? -Set-RDCertificate -Role RDRedirector -Thumbprint $certificate.Thumbprint -ConnectionBroker $rdsFqdn -Force +Set-RDCertificate -Role RDRedirector -Thumbprint $certificate.Thumbprint -ConnectionBroker $rdsFqdn -ErrorAction Stop -Force $success = $success -and $? 
-Set-RDCertificate -Role RDWebAccess -Thumbprint $certificate.Thumbprint -ConnectionBroker $rdsFqdn -Force +Set-RDCertificate -Role RDWebAccess -Thumbprint $certificate.Thumbprint -ConnectionBroker $rdsFqdn -ErrorAction Stop -Force $success = $success -and $? -Set-RDCertificate -Role RDGateway -Thumbprint $certificate.Thumbprint -ConnectionBroker $rdsFqdn -Force +Set-RDCertificate -Role RDGateway -Thumbprint $certificate.Thumbprint -ConnectionBroker $rdsFqdn -ErrorAction Stop -Force $success = $success -and $? +Write-Host "Currently installed certificates:" +Get-RDCertificate -ConnectionBroker $rdsFqdn if($success) { Write-Host " [o] Successfully updated RDS roles" } else { Write-Host " [x] Failed to update RDS roles!" throw "Could not update RDS roles" } -Write-Host "Currently installed certificates:" -Get-RDCertificate -ConnectionBroker $rdsFqdn -Write-Host "`n" - # Extract a base64-encoded certificate # ------------------------------------ diff --git a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 index 4c71ccdb52..bfc32b2c1e 100644 --- a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 +++ b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 @@ -7,6 +7,7 @@ Write-Output "Initialising data drives..." 
Stop-Service ShellHWDetection $CandidateRawDisks = Get-Disk | Where-Object {$_.PartitionStyle -eq 'raw'} | Sort -Property Number foreach ($RawDisk in $CandidateRawDisks) { + Write-Output "Configuring disk $($RawDisk.Number)" $LUN = (Get-WmiObject Win32_DiskDrive | Where-Object index -eq $RawDisk.Number | Select-Object SCSILogicalUnit -ExpandProperty SCSILogicalUnit) $_ = Initialize-Disk -PartitionStyle GPT -Number $RawDisk.Number $Partition = New-Partition -DiskNumber $RawDisk.Number -UseMaximumSize -AssignDriveLetter @@ -15,25 +16,15 @@ foreach ($RawDisk in $CandidateRawDisks) { Start-Service ShellHWDetection -# Setup user profile disk shares -# ------------------------------ -Write-Output "Creating user profile disk shares..." -foreach ($sharePath in ("F:\AppFileShares", "G:\RDPFileShares", "H:\ReviewFileShares")) { - $_ = New-Item -ItemType Directory -Force -Path $sharePath - if($null -eq $(Get-SmbShare | Where-Object -Property Path -eq $sharePath)) { - New-SmbShare -Path $sharePath -Name $sharePath.Split("\")[1] -FullAccess "\$","\$","\$","\$","\Domain Admins" - } -} - - # Remove any old RDS settings # --------------------------- +Write-Output "Removing any old RDS settings..." foreach ($collection in $(Get-RDSessionCollection -ErrorAction SilentlyContinue)) { - Write-Output "Removing existing RDSessionCollection: '$collection.CollectionName' (and associated apps)" + Write-Output "... removing existing RDSessionCollection: '$($collection.CollectionName)'" Remove-RDSessionCollection -CollectionName $collection.CollectionName -Force -ErrorAction SilentlyContinue } foreach ($server in $(Get-RDServer -ErrorAction SilentlyContinue)) { - Write-Output "Removing existing RDServer: '$($server.Server)'" + Write-Output "... 
removing existing RDServer: '$($server.Server)'" foreach ($role in $server.Roles) { Remove-RDServer -Server $server.Server -Role $role -Force -ErrorAction SilentlyContinue } @@ -43,75 +34,109 @@ foreach ($server in $(Get-RDServer -ErrorAction SilentlyContinue)) { # Create RDS Environment # ---------------------- Write-Output "Creating RDS Environment..." -New-RDSessionDeployment -ConnectionBroker "" -WebAccessServer "" -SessionHost @("", "", "") -Add-RDServer -Server -Role RDS-LICENSING -ConnectionBroker -Set-RDLicenseConfiguration -LicenseServer -Mode PerUser -ConnectionBroker -Force -Add-WindowsFeature -Name RDS-Gateway -IncludeAllSubFeature -Add-RDServer -Server -Role RDS-GATEWAY -ConnectionBroker -GatewayExternalFqdn $sreFqdn +try { + # Setup licensing server + New-RDSessionDeployment -ConnectionBroker "" -WebAccessServer "" -SessionHost @("", "", "") -ErrorAction Stop + Add-RDServer -Server -Role RDS-LICENSING -ConnectionBroker -ErrorAction Stop + Set-RDLicenseConfiguration -LicenseServer -Mode PerUser -ConnectionBroker -Force -ErrorAction Stop + # Setup gateway server + $_ = Add-WindowsFeature -Name RDS-Gateway -IncludeAllSubFeature -ErrorAction Stop + Add-RDServer -Server -Role RDS-GATEWAY -ConnectionBroker -GatewayExternalFqdn -ErrorAction Stop + Set-RDWorkspace -Name "Safe Haven Applications" -ConnectionBroker + Write-Output " [o] RDS environment configuration update succeeded" +} catch { + Write-Output " [x] RDS environment configuration update failed!" + throw +} # Create collections # ------------------ -$collectionName = "Remote Applications" -Write-Output "Creating '$collectionName' collection..." 
-New-RDSessionCollection -CollectionName "$collectionName" -SessionHost -ConnectionBroker -Set-RDSessionCollectionConfiguration -CollectionName "$collectionName" -UserGroup "\" -ClientPrinterRedirected $false -ClientDeviceRedirectionOptions None -DisconnectedSessionLimitMin 5 -IdleSessionLimitMin 720 -ConnectionBroker -Set-RDSessionCollectionConfiguration -CollectionName "$collectionName" -EnableUserProfileDisk -MaxUserProfileDiskSizeGB "20" -DiskPath \\\AppFileShares -ConnectionBroker +foreach($rdsConfiguration in @(("Applications", "", "", "F:\AppFileShares"), + ("Desktop (Windows)", "", "", "G:\RDPFileShares"), + ("Review", "", "", "H:\ReviewFileShares"))) { + $collectionName, $sessionHost, $userGroup, $sharePath = $rdsConfiguration -$collectionName = "Presentation Server" -Write-Output "Creating '$collectionName' collection..." -New-RDSessionCollection -CollectionName "$collectionName" -SessionHost -ConnectionBroker -Set-RDSessionCollectionConfiguration -CollectionName "$collectionName" -UserGroup "\" -ClientPrinterRedirected $false -ClientDeviceRedirectionOptions None -DisconnectedSessionLimitMin 5 -IdleSessionLimitMin 720 -ConnectionBroker -Set-RDSessionCollectionConfiguration -CollectionName "$collectionName" -EnableUserProfileDisk -MaxUserProfileDiskSizeGB "20" -DiskPath \\\RDPFileShares -ConnectionBroker + # Setup user profile disk shares + Write-Output "Creating user profile disk shares..." + $_ = New-Item -ItemType Directory -Force -Path $sharePath + $shareName = $sharePath.Split("\")[1] + $sessionHostComputerName = $sessionHost.Split(".")[0] + if ($null -eq $(Get-SmbShare | Where-Object -Property Path -eq $sharePath)) { + $_ = New-SmbShare -Path $sharePath -Name $shareName -FullAccess "\$","\${sessionHostComputerName}$","\Domain Admins" + } -$collectionName = "Review" -Write-Output "Creating '$collectionName' collection..." 
-New-RDSessionCollection -CollectionName "$collectionName" -SessionHost -ConnectionBroker -Set-RDSessionCollectionConfiguration -CollectionName "$collectionName" -UserGroup "\" -ClientPrinterRedirected $false -ClientDeviceRedirectionOptions None -DisconnectedSessionLimitMin 5 -IdleSessionLimitMin 720 -ConnectionBroker -Set-RDSessionCollectionConfiguration -CollectionName "$collectionName" -EnableUserProfileDisk -MaxUserProfileDiskSizeGB "20" -DiskPath \\\ReviewFileShares -ConnectionBroker + # Create collections + Write-Output "Creating '$collectionName' collection..." + try { + $_ = New-RDSessionCollection -CollectionName "$collectionName" -SessionHost "$sessionHost" -ConnectionBroker -ErrorAction Stop + $_ = Set-RDSessionCollectionConfiguration -CollectionName "$collectionName" -UserGroup "\$userGroup" -ClientPrinterRedirected $false -ClientDeviceRedirectionOptions None -DisconnectedSessionLimitMin 5 -IdleSessionLimitMin 720 -ConnectionBroker -ErrorAction Stop + $_ = Set-RDSessionCollectionConfiguration -CollectionName "$collectionName" -EnableUserProfileDisk -MaxUserProfileDiskSizeGB "20" -DiskPath "\\\$shareName" -ConnectionBroker -ErrorAction Stop + Write-Output " [o] Creating '$collectionName' collection succeeded" + } catch { + Write-Output " [x] Creating '$collectionName' collection failed!" + throw + } +} # Create applications # ------------------- -Write-Output "Creating applications..." 
-New-RDRemoteApp -Alias "mstsc (1)" -DisplayName "DSVM Main (Desktop)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "-v .160" -CollectionName "Remote Applications" -ConnectionBroker -New-RDRemoteApp -Alias "putty (1)" -DisplayName "DSVM Main (SSH)" -FilePath "C:\Program Files\PuTTY\putty.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "-ssh .160" -CollectionName "Remote Applications" -ConnectionBroker -New-RDRemoteApp -Alias "mstsc (2)" -DisplayName "DSVM Other (Desktop)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CollectionName "Remote Applications" -ConnectionBroker -New-RDRemoteApp -Alias "putty (2)" -DisplayName "DSVM Other (SSH)" -FilePath "C:\Program Files\PuTTY\putty.exe" -ShowInWebAccess 1 -CollectionName "Remote Applications" -ConnectionBroker -New-RDRemoteApp -Alias WinSCP -DisplayName "File Transfer" -FilePath "C:\Program Files (x86)\WinSCP\WinSCP.exe" -ShowInWebAccess 1 -CollectionName "Remote Applications" -ConnectionBroker -New-RDRemoteApp -Alias "chrome (1)" -DisplayName "GitLab" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151" -CollectionName "Remote Applications" -ConnectionBroker -New-RDRemoteApp -Alias "chrome (2)" -DisplayName "HackMD" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.152:3000" -CollectionName "Remote Applications" -ConnectionBroker -New-RDRemoteApp -Alias "chrome (3)" -DisplayName "GitLab Review" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151" -CollectionName "Review" -ConnectionBroker - +Write-Output "Registering applications..." 
+Get-RDRemoteApp | Remove-RDRemoteApp -Force -ErrorAction SilentlyContinue +try { + $_ = New-RDRemoteApp -Alias "chrome (1)" -DisplayName "Code Review" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151" -CollectionName "Review" -ConnectionBroker -ErrorAction Stop + $_ = New-RDRemoteApp -Alias "mstsc (1)" -DisplayName "DSVM Main (Desktop)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "-v .160" -CollectionName "Applications" -ConnectionBroker -ErrorAction Stop + $_ = New-RDRemoteApp -Alias "mstsc (2)" -DisplayName "DSVM Other (Desktop)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CollectionName "Applications" -ConnectionBroker -ErrorAction Stop + $_ = New-RDRemoteApp -Alias "chrome (2)" -DisplayName "GitLab" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151" -CollectionName "Applications" -ConnectionBroker -ErrorAction Stop + $_ = New-RDRemoteApp -Alias "chrome (3)" -DisplayName "HackMD" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.152:3000" -CollectionName "Applications" -ConnectionBroker -ErrorAction Stop + $_ = New-RDRemoteApp -Alias "putty (1)" -DisplayName "SSH (DSVM Main)" -FilePath "C:\Program Files\PuTTY\putty.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "-ssh .160" -CollectionName "Applications" -ConnectionBroker -ErrorAction Stop + $_ = New-RDRemoteApp -Alias "putty (2)" -DisplayName "SSH (DSVM Other)" -FilePath "C:\Program Files\PuTTY\putty.exe" -ShowInWebAccess 1 -CollectionName "Applications" -ConnectionBroker -ErrorAction Stop + # $_ = New-RDRemoteApp -Alias "WinSCP" -DisplayName "File Transfer" -FilePath "C:\Program Files 
(x86)\WinSCP\WinSCP.exe" -ShowInWebAccess 1 -CollectionName "Applications" -ConnectionBroker -ErrorAction Stop + Write-Output " [o] Registering applications succeeded" +} catch { + Write-Output " [x] Registering applications failed!" + throw +} # Update server configuration # --------------------------- Write-Output "Updating server configuration..." -Get-Process ServerManager -ErrorAction SilentlyContinue | Stop-Process -Force -foreach ($targetDirectory in @("C:\Users\\AppData\Roaming\Microsoft\Windows\ServerManager", - "C:\Users\.\AppData\Roaming\Microsoft\Windows\ServerManager")) { - $_ = New-Item -ItemType Directory -Force -Path $targetDirectory - Copy-Item -Path "\ServerList.xml" -Destination "$targetDirectory\ServerList.xml" -Force -} -Start-Process -FilePath $env:SystemRoot\System32\ServerManager.exe -WindowStyle Maximized -if ($?) { +try { + + Get-Process ServerManager -ErrorAction SilentlyContinue | Stop-Process -Force + foreach ($targetDirectory in @("C:\Users\\AppData\Roaming\Microsoft\Windows\ServerManager", + "C:\Users\.\AppData\Roaming\Microsoft\Windows\ServerManager")) { + $_ = New-Item -ItemType Directory -Path $targetDirectory -Force -ErrorAction Stop + Copy-Item -Path "\ServerList.xml" -Destination "$targetDirectory\ServerList.xml" -Force -ErrorAction Stop + } + Start-Process -FilePath $env:SystemRoot\System32\ServerManager.exe -WindowStyle Maximized -ErrorAction Stop Write-Output " [o] Server configuration update succeeded" -} else { +} catch { Write-Output " [x] Server configuration update failed!" + throw } # Install RDS webclient # --------------------- Write-Output "Installing RDS webclient..." -Install-RDWebClientPackage -if ($?) { +try { + Install-RDWebClientPackage -ErrorAction Stop Write-Output " [o] RDS webclient installation succeeded" -} else { +} catch { Write-Output " [x] RDS webclient installation failed!" 
+ throw } # Remove the requirement for the /RDWeb/webclient/ suffix by setting up a redirect in IIS # --------------------------------------------------------------------------------------- -Set-WebConfiguration system.webServer/httpRedirect "IIS:\sites\Default Web Site" -Value @{enabled="true";destination="/RDWeb/webclient/";httpResponseStatus="Permanent"} +Write-Output "Setting up IIS redirect..." +try { + Set-WebConfiguration system.webServer/httpRedirect "IIS:\sites\Default Web Site" -Value @{enabled="true";destination="/RDWeb/webclient/";httpResponseStatus="Permanent"} -ErrorAction Stop + Write-Output " [o] IIS redirection succeeded" +} catch { + Write-Output " [x] IIS redirection failed!" + throw +} diff --git a/deployment/secure_research_environment/remote/create_rds/templates/ServerList.template.xml b/deployment/secure_research_environment/remote/create_rds/templates/ServerList.template.xml index cb91d1f3ad..b866aed06a 100755 --- a/deployment/secure_research_environment/remote/create_rds/templates/ServerList.template.xml +++ b/deployment/secure_research_environment/remote/create_rds/templates/ServerList.template.xml @@ -1,6 +1,7 @@ - - - - - + + + + + + \ No newline at end of file diff --git a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 index 3e2d86532e..f330278553 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 @@ -23,22 +23,22 @@ $_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName $_ = Deploy-ResourceGroup -Name $config.sre.network.vnet.rg -Location $config.sre.location -# Create VNet from template -# ------------------------- -Add-LogMessage -Level Info "Creating virtual network '$($config.sre.network.vnet.name)' from template..." 
-$params = @{ - "Virtual Network Name" = $config.sre.network.vnet.Name - "Virtual Network Address Space" = $config.sre.network.vnet.cidr - "Subnet-Identity Address Prefix" = $config.sre.network.subnets.identity.cidr - "Subnet-RDS Address Prefix" = $config.sre.network.subnets.rds.cidr - "Subnet-Data Address Prefix" = $config.sre.network.subnets.data.cidr - "Subnet-Identity Name" = $config.sre.network.subnets.identity.Name - "Subnet-RDS Name" = $config.sre.network.subnets.rds.Name - "Subnet-Data Name" = $config.sre.network.subnets.data.Name - "VNET_DNS_DC1" = $config.shm.dc.ip - "VNET_DNS_DC2" = $config.shm.dcb.ip -} -Deploy-ArmTemplate -TemplatePath (Join-Path $PSScriptRoot ".." "arm_templates" "sre-vnet-gateway-template.json") -Params $params -ResourceGroupName $config.sre.network.vnet.rg +# # Create VNet from template +# # ------------------------- +# Add-LogMessage -Level Info "Creating virtual network '$($config.sre.network.vnet.name)' from template..." +# $params = @{ +# "Virtual Network Name" = $config.sre.network.vnet.Name +# "Virtual Network Address Space" = $config.sre.network.vnet.cidr +# "Subnet-Identity Address Prefix" = $config.sre.network.subnets.identity.cidr +# "Subnet-RDS Address Prefix" = $config.sre.network.subnets.rds.cidr +# "Subnet-Data Address Prefix" = $config.sre.network.subnets.data.cidr +# "Subnet-Identity Name" = $config.sre.network.subnets.identity.Name +# "Subnet-RDS Name" = $config.sre.network.subnets.rds.Name +# "Subnet-Data Name" = $config.sre.network.subnets.data.Name +# "VNET_DNS_DC1" = $config.shm.dc.ip +# "VNET_DNS_DC2" = $config.shm.dcb.ip +# } +# Deploy-ArmTemplate -TemplatePath (Join-Path $PSScriptRoot ".." 
"arm_templates" "sre-vnet-gateway-template.json") -Params $params -ResourceGroupName $config.sre.network.vnet.rg # Fetch VNet information @@ -53,50 +53,41 @@ $shmVnet = Get-AzVirtualNetwork -Name $config.shm.network.vnet.Name -ResourceGro # ------------------------ $shmPeeringName = "PEER_$($config.sre.network.vnet.Name)" $srePeeringName = "PEER_$($config.shm.network.vnet.Name)" -# From SHM VNet -$_ = Set-AzContext -SubscriptionId $config.shm.subscriptionName -if (Get-AzVirtualNetworkPeering -VirtualNetworkName $config.shm.network.vnet.name -ResourceGroupName $config.shm.network.vnet.rg) { - Add-LogMessage -Level Info "[ ] Removing existing peering '$shmPeeringName' from '$($config.sre.network.vnet.name)' to '$($config.shm.network.vnet.name)'..." - Remove-AzVirtualNetworkPeering -Name $shmPeeringName -VirtualNetworkName $config.shm.network.vnet.name -ResourceGroupName $config.shm.network.vnet.rg -Force - if ($?) { - Add-LogMessage -Level Success "Peering removal succeeded" - } else { - Add-LogMessage -Level Fatal "Peering removal failed!" +try { + # From SHM VNet + $_ = Set-AzContext -SubscriptionId $config.shm.subscriptionName -ErrorAction Stop + if (Get-AzVirtualNetworkPeering -VirtualNetworkName $config.shm.network.vnet.name -ResourceGroupName $config.shm.network.vnet.rg -ErrorAction Stop) { + Add-LogMessage -Level Info "[ ] Removing existing peering '$shmPeeringName' from '$($config.sre.network.vnet.name)' to '$($config.shm.network.vnet.name)'..." 
+ Remove-AzVirtualNetworkPeering -Name $shmPeeringName -VirtualNetworkName $config.shm.network.vnet.name -ResourceGroupName $config.shm.network.vnet.rg -Force -ErrorAction Stop } -} -# From SRE VNet -$_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName -if (Get-AzVirtualNetworkPeering -VirtualNetworkName $config.sre.network.vnet.name -ResourceGroupName $config.sre.network.vnet.rg) { - Add-LogMessage -Level Info "[ ] Removing existing peering '$srePeeringName' from '$($config.shm.network.vnet.name)' to '$($config.sre.network.vnet.name)'..." - Remove-AzVirtualNetworkPeering -Name $srePeeringName -VirtualNetworkName $config.sre.network.vnet.name -ResourceGroupName $config.sre.network.vnet.rg -Force - if ($?) { - Add-LogMessage -Level Success "Peering removal succeeded" - } else { - Add-LogMessage -Level Fatal "Peering removal failed!" + # From SRE VNet + $_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName -ErrorAction Stop + if (Get-AzVirtualNetworkPeering -VirtualNetworkName $config.sre.network.vnet.name -ResourceGroupName $config.sre.network.vnet.rg -ErrorAction Stop) { + Add-LogMessage -Level Info "[ ] Removing existing peering '$srePeeringName' from '$($config.shm.network.vnet.name)' to '$($config.sre.network.vnet.name)'..." + Remove-AzVirtualNetworkPeering -Name $srePeeringName -VirtualNetworkName $config.sre.network.vnet.name -ResourceGroupName $config.sre.network.vnet.rg -Force -ErrorAction Stop } -} - -# Add peering to SHM Vnet -# ----------------------- -$_ = Set-AzContext -SubscriptionId $config.shm.subscriptionName -Add-LogMessage -Level Info "[ ] Adding peering '$shmPeeringName' from '$($config.sre.network.vnet.name)' to '$($config.shm.network.vnet.name)'..." -$_ = Add-AzVirtualNetworkPeering -Name $shmPeeringName -VirtualNetwork $shmVnet -RemoteVirtualNetworkId $sreVnet.Id -AllowGatewayTransit -if ($?) 
{ - Add-LogMessage -Level Success "Peering '$($config.sre.network.vnet.name)' to '$($config.shm.network.vnet.name)' succeeded" -} else { - Add-LogMessage -Level Fatal "Peering '$($config.sre.network.vnet.name)' to '$($config.shm.network.vnet.name)' failed!" + # Success log message + Add-LogMessage -Level Success "Peering removal succeeded" +} catch { + Add-LogMessage -Level Fatal "Peering removal failed!" } -# Add peering to SRE VNet -# ----------------------- -$_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName -Add-LogMessage -Level Info "[ ] Adding peering '$srePeeringName' from '$($config.shm.network.vnet.name)' to '$($config.sre.network.vnet.name)'..." -$_ = Add-AzVirtualNetworkPeering -Name $srePeeringName -VirtualNetwork $sreVnet -RemoteVirtualNetworkId $shmVnet.Id -UseRemoteGateways -if ($?) { - Add-LogMessage -Level Success "Peering '$($config.shm.network.vnet.name)' to '$($config.sre.network.vnet.name)' succeeded" -} else { - Add-LogMessage -Level Fatal "Peering '$($config.shm.network.vnet.name)' to '$($config.sre.network.vnet.name)' failed!" +# Add new peerings between SHM and SRE VNets +# ------------------------------------------ +try { + $_ = Set-AzContext -SubscriptionId $config.shm.subscriptionName -ErrorAction Stop + Add-LogMessage -Level Info "[ ] Adding peering '$shmPeeringName' from '$($config.sre.network.vnet.name)' to '$($config.shm.network.vnet.name)'..." + $_ = Add-AzVirtualNetworkPeering -Name $shmPeeringName -VirtualNetwork $shmVnet -RemoteVirtualNetworkId $sreVnet.Id -AllowGatewayTransit -ErrorAction Stop + # Add peering to SRE VNet + # ----------------------- + $_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName -ErrorAction Stop + Add-LogMessage -Level Info "[ ] Adding peering '$srePeeringName' from '$($config.shm.network.vnet.name)' to '$($config.sre.network.vnet.name)'..." 
+ $_ = Add-AzVirtualNetworkPeering -Name $srePeeringName -VirtualNetwork $sreVnet -RemoteVirtualNetworkId $shmVnet.Id -UseRemoteGateways -ErrorAction Stop + # Success log message + Add-LogMessage -Level Success "Peering '$($config.shm.network.vnet.name)' and '$($config.sre.network.vnet.name)' succeeded" +} catch { + Add-LogMessage -Level Fatal "Peering '$($config.shm.network.vnet.name)' and '$($config.sre.network.vnet.name)' failed!" } @@ -248,38 +239,33 @@ Add-LogMessage -Level Info "Upload RDS deployment scripts to storage..." # Expand deploy script $deployScriptLocalFilePath = (New-TemporaryFile).FullName -# $template = Get-Content (Join-Path $PSScriptRoot ".." "remote" "create_rds" "templates" "Deploy_RDS_Environment.template.ps1") -Raw -# $ExecutionContext.InvokeCommand.ExpandString($template) | Out-File $deployScriptLocalFilePath $template = Join-Path $PSScriptRoot ".." "remote" "create_rds" "templates" "Deploy_RDS_Environment.template.ps1" | Get-Item | Get-Content -Raw -$template.Replace('', $shmNetbiosName). - Replace('', $rdsGatewayVmName). - Replace('',$rdsSh1VmName). - Replace('',$rdsSh2VmName). - Replace('',$rdsSh3VmName). +$template.Replace('',$airlockSubnetIpPrefix). + Replace('',$dataSubnetIpPrefix). Replace('',$rdsGatewayVmFqdn). + Replace('', $rdsGatewayVmName). Replace('',$rdsSh1VmFqdn). + Replace('',$rdsSh1VmName). Replace('',$rdsSh2VmFqdn). + Replace('',$rdsSh2VmName). Replace('',$rdsSh3VmFqdn). + Replace('',$rdsSh3VmName). + Replace('',$remoteUploadDir). Replace('',$researchUserSgName). Replace('',$reviewUserSgName). - Replace('',$dataSubnetIpPrefix). - Replace('',$airlockSubnetIpPrefix). Replace('',$shmDcAdminUsername). - Replace('',$remoteUploadDir). - Replace('',$example). - Replace('',$example). - Replace('',$example). - Replace('',$example). - Replace('',$example). - Replace('',$example). - Replace('',$config.shm.domain.netbiosName) | Out-File $deployScriptLocalFilePath - - + Replace('', $shmNetbiosName). 
+ Replace('',$sreFqdn) | Out-File $deployScriptLocalFilePath # Expand server list XML $serverListLocalFilePath = (New-TemporaryFile).FullName -$template = Get-Content (Join-Path $PSScriptRoot ".." "remote" "create_rds" "templates" "ServerList.template.xml") -Raw -$ExecutionContext.InvokeCommand.ExpandString($template) | Out-File $serverListLocalFilePath +$template = Join-Path $PSScriptRoot ".." "remote" "create_rds" "templates" "ServerList.template.xml" | Get-Item | Get-Content -Raw +$template.Replace('',$rdsGatewayVmFqdn). + Replace('', $rdsGatewayVmName). + Replace('',$rdsSh1VmFqdn). + Replace('',$rdsSh2VmFqdn). + Replace('',$rdsSh3VmFqdn). + Replace('',$sreFqdn) | Out-File $serverListLocalFilePath # Copy existing files Add-LogMessage -Level Info "[ ] Copying RDS installers to storage account '$($sreStorageAccount.StorageAccountName)'" @@ -368,7 +354,10 @@ $_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName foreach ($nameVMNameParamsPair in $vmNamePairs) { $name, $vmName = $nameVMNameParamsPair Add-LogMessage -Level Info "Updating ${name}: '$vmName'..." - Invoke-WindowsConfigureAndUpdate -VMName $vmName -ResourceGroupName $config.sre.rds.rg -CommonPowershellPath (Join-Path $PSScriptRoot ".." ".." 
"common") + $params = @{} + # The RDS Gateway needs the RDWebClientManagement Powershell module + if ($name -eq "RDS Gateway") { $params["AdditionalPowershellModules"] = @("RDWebClientManagement") } + Invoke-WindowsConfigureAndUpdate -VMName $vmName -ResourceGroupName $config.sre.rds.rg @params } @@ -381,6 +370,7 @@ $_ = Set-AzContext -SubscriptionId $config.shm.subscriptionName Add-LogMessage -Level Info "[ ] Getting list of packages for each VM" $filePathsSh1 = New-Object System.Collections.ArrayList ($null) $filePathsSh2 = New-Object System.Collections.ArrayList ($null) +$filePathsSh3 = New-Object System.Collections.ArrayList ($null) foreach ($blob in Get-AzStorageBlob -Container $containerNameSessionHosts -Context $sreStorageAccount.Context) { if (($blob.Name -like "*GoogleChrome_x64.msi") -or ($blob.Name -like "*PuTTY_x64.msi") -or ($blob.Name -like "*WinSCP_x32.exe")) { $_ = $filePathsSh1.Add($blob.Name) @@ -470,14 +460,6 @@ foreach ($nameVMNameParamsPair in $vmNamePairs) { } -# Install required Powershell modules on RDS Gateway -# -------------------------------------------------- -Add-LogMessage -Level Info "[ ] Installing required Powershell modules on RDS Gateway..." -$scriptPath = Join-Path $PSScriptRoot ".." 
"remote" "create_rds" "scripts" "Install_Additional_Powershell_Modules.ps1" -$result = Invoke-RemoteScript -Shell "PowerShell" -ScriptPath $scriptPath -VMName $config.sre.rds.gateway.vmName -ResourceGroupName $config.sre.rds.rg -Write-Output $result.Value - - # Add VMs to correct NSG # ---------------------- Add-VmToNSG -VMName $config.sre.rds.gateway.vmName -NSGName $config.sre.rds.gateway.nsg diff --git a/environment_configs/full/sre_mortestsandbox_full_config.json b/environment_configs/full/sre_mortestsandbox_full_config.json index e71355e1e9..4f13e38136 100644 --- a/environment_configs/full/sre_mortestsandbox_full_config.json +++ b/environment_configs/full/sre_mortestsandbox_full_config.json @@ -154,8 +154,8 @@ "description": "SG SANDBOX Research Users" }, "reviewUsers": { - "name": "SG SANDBOX Research Users", - "description": "SG SANDBOX Research Users" + "name": "SG SANDBOX Review Users", + "description": "SG SANDBOX Review Users" } } }, @@ -302,7 +302,10 @@ "sessionHost3": { "vmName": "REV-SRE-SANDBOX", "vmSize": "Standard_DS2_v2", - "nsg": "NSG_SRE_SANDBOX_RDS_SESSION_HOSTS" + "nsg": "NSG_SRE_SANDBOX_RDS_SESSION_HOSTS", + "hostname": "REV-SRE-SANDBOX", + "fqdn": "REV-SRE-SANDBOX.cw20.dsgroupdev.co.uk", + "ip": "10.150.1.247" } }, "dataserver": { From 9a52480333ac3fce485f0c51e6838d786fafbfc3 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Fri, 1 May 2020 11:44:53 +0100 Subject: [PATCH 011/155] Add gitlab server to gitlab external Needs testing. Also renames some parameters and secrets to make the distinction between external and internal clearer. 
--- deployment/common/Configuration.psm1 | 7 +- .../cloud-init-gitlab-external.template.yaml | 161 ++++++++++++++++-- .../cloud-init-gitlab-internal.template.yaml | 4 +- .../setup/Setup_SRE_WebApp_Servers.ps1 | 40 ++++- .../full/sre_mortestsandbox_full_config.json | 7 +- 5 files changed, 192 insertions(+), 27 deletions(-) diff --git a/deployment/common/Configuration.psm1 b/deployment/common/Configuration.psm1 index 07edeeeb24..ede35744bf 100644 --- a/deployment/common/Configuration.psm1 +++ b/deployment/common/Configuration.psm1 @@ -398,11 +398,14 @@ function Add-SreConfig { gitlabLdapPassword = "$($config.sre.shortName)-gitlab-ldap-password" gitlabRootPassword = "$($config.sre.shortName)-gitlab-root-password" gitlabUserPassword = "$($config.sre.shortName)-gitlab-user-password" + gitlabInternalUsername = "$($config.sre.shortName)-gitlab-internal-username" + gitlabInternalPassword = "$($config.sre.shortName)-gitlab-internal-password" + gitlabInternalAPIToken = "$($config.sre.shortName)-gitlab-internal-api-token" + hackmdLdapPassword = "$($config.sre.shortName)-hackmd-ldap-password" + hackmdUserPassword = "$($config.sre.shortName)-hackmd-user-password" gitlabExternalUsername = "$($config.sre.shortName)-gitlab-external-username" gitlabExternalPassword = "$($config.sre.shortName)-gitlab-external-password" gitlabExternalAPIToken = "$($config.sre.shortName)-gitlab-external-api-token" - hackmdLdapPassword = "$($config.sre.shortName)-hackmd-ldap-password" - hackmdUserPassword = "$($config.sre.shortName)-hackmd-user-password" letsEncryptCertificate = "$($config.sre.shortName)-lets-encrypt-certificate" npsSecret = "$($config.sre.shortName)-nps-secret" rdsAdminPassword = "$($config.sre.shortName)-rdsvm-admin-password" diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index 57e5628b27..28306e04b0 100644 --- 
a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -3,13 +3,55 @@ package_update: true package_upgrade: true # Install LDAP tools for debugging LDAP issues -# !!!TODO openssh-server for access during development only!!!! packages: - git - - openssh-client + - apt-transport-https + - ca-certificates + - curl + - gitlab-ce + - gnupg + - ldap-utils - openssh-server + - postfix + +apt: + # Preserves the existing /etc/apt/sources.list + preserve_sources_list: true + + # Add repositories + sources: + gitlab.list: + source: "deb https://packages.gitlab.com/gitlab/gitlab-ce/ubuntu bionic main" + keyid: 3F01618A51312F3F write_files: + - path: /etc/gitlab/gitlab.rb + permissions: "0600" + content: | + external_url 'http://' + gitlab_rails['ldap_enabled'] = true + gitlab_rails['ldap_servers'] = YAML.load <<-'EOS' + main: # 'main' is the GitLab 'provider ID' of this LDAP server + label: 'LDAP' + host: '' + port: 389 + uid: 'sAMAccountName' + method: 'plain' # "tls" or "ssl" or "plain" + bind_dn: '' + password: '' + active_directory: true + allow_username_or_email_login: true + block_auto_created_users: false + base: '' + user_filter: '' + attributes: + username: ['uid', 'userid', 'sAMAccountName'] + email: ['mail', 'email', 'userPrincipalName'] + name: 'cn' + first_name: 'givenName' + last_name: 'sn' + EOS + git_data_dirs({ "default" => { "path" => "/datadrive/gitdata" } }) - path: "/home//.secrets/gitlab-internal-api-token" permissions: "0600" content: | @@ -17,7 +59,31 @@ write_files: - path: "/home//.secrets/gitlab-internal-ip-address" permissions: "0600" content: | - + + - path: "/home//.secrets/gitlab-internal-username" + permissions: "0600" + content: | + + - path: "/home//.secrets/gitlab-internal-user-email" + permissions: "0600" + content: | + @ + - path: "/home//.secrets/gitlab-external-api-token" + permissions: "0600" + 
content: | + + - path: "/home//.secrets/gitlab-external-ip-address" + permissions: "0600" + content: | + + - path: "/home//.secrets/gitlab-external-username" + permissions: "0600" + content: | + + - path: "/home//.secrets/gitlab-external-user-email" + permissions: "0600" + content: | + @ - path: "/home//update_from_whitelist.py" permissions: "0755" content: | @@ -50,6 +116,7 @@ write_files: params = {"owned": True, "simple": True}) gitlab_internal_repo_names = [repo["name"].lower() for repo in gitlab_internal_projects.json()] + for repo in whitelist: repo_name = repo["gitlab_name"] repo_path = os.path.join(repo_name) @@ -79,26 +146,96 @@ write_files: os.chdir("..") runcmd: + # -------------------------------- + # SETUP GITLAB EXTERNAL SERVER + # -------------------------------- + # Configure server + - echo "Configuring gitlab external server" + - echo " " >> /etc/hosts + - echo "Europe/London" > /etc/timezone + - dpkg-reconfigure -f noninteractive tzdata + # Set up the data disk + - echo "Setting up data disk..." 
+ - parted /dev/sdc mklabel gpt + - parted /dev/sdc mkpart primary ext4 0% 100% + - parted /dev/sdc print + - sleep 5 + - mkfs -t ext4 /dev/sdc1 + - mkdir -p /datadrive + - mount /dev/sdc1 /datadrive + - UUID=$(blkid | grep "/dev/sdc1" | cut -d'"' -f2) + - sed "s|UUID|UUID=$UUID\t/datadrive\text4\tdefaults,nofail\t1\t2\nUUID|" /etc/fstab > fstab.tmp + - mv fstab.tmp /etc/fstab + - mkdir -p /datadrive/gitlab-data + # Enable custom GitLab settings and run an initial configuration + - echo "Running initial configuration" + - gitlab-ctl reconfigure + # Set root password and don't prompt for it to be reset when web app first loaded + - | + echo "user = User.find_by(username: 'root');user.password=user.password_confirmation='';user.password_automatically_set=false;user.save!;exit;" | gitlab-rails console -e production + # Turn off user account creation + - | + gitlab-rails runner "ApplicationSetting.last.update_attributes(signup_enabled: false)" + # Restrict login to SHM domain (must be done AFTER GitLab update) + - | + gitlab-rails runner "ApplicationSetting.last.update_attributes(domain_whitelist: [''])" + # Create user for ingressing external git repos + - | + echo "user = User.create(:username => '', :password => '', :password_confirmation => '', :email =>'@', :skip_confirmation => true, :name => '');user.save!;exit;" | gitlab-rails console -e production + # Create a API token for the ingress user created above + - | + echo "user = User.find_by(username: '');user.personal_access_tokens.create(name: 'apitoken', token_digest: Gitlab::CryptoHelper.sha256(''), impersonation: false, scopes: [:api]);exit;" | gitlab-rails console -e production + # Reload GitLab configuration and restart GitLab + - gitlab-ctl reconfigure + - gitlab-ctl restart + # -------------------------------- + # Create SSH key + # -------------------------------- + - | + mkdir -p /home//.ssh; + ssh-keygen -t ed25519 -C 'gitlab' -N '' -f /home//.ssh/id_ed25519; + chown : "/home//.ssh/id_ed25519" + chown 
: "/home//.ssh/id_ed25519.pub" + # -------------------------------- + # SETUP ACCESS TO GITLAB EXTERNAL + # -------------------------------- + - echo "Configuring access to gitlab external" + # Change ownership of secrets to + - | + chown : "/home//.secrets/gitlab-external-api-token"; + chown : "/home//.secrets/gitlab-external-ip-address"; + chown : "/home//.secrets/gitlab-external-username"; + chown : "/home//.secrets/gitlab-external-user-email"; + # Create SSH key for gitlab internal access, add gitlab internal to known hosts + - | + key=$(cat /home//.ssh/id_ed25519.pub); + curl --header 'Authorization: Bearer ' --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"ExternalAPIUser\"}" /api/v4/user/keys; + ssh-keyscan -H >> /home//.ssh/known_hosts; + chown : "/home//.ssh/known_hosts" + # -------------------------------- + # SETUP ACCESS TO GITLAB INTERNAL + # -------------------------------- + - echo "Configuring access to gitlab internal" # Change ownership of secrets to - | chown : "/home//.secrets/gitlab-internal-api-token"; chown : "/home//.secrets/gitlab-internal-ip-address"; + chown : "/home//.secrets/gitlab-internal-username"; + chown : "/home//.secrets/gitlab-internal-user-email"; # Create SSH key for gitlab internal access, add gitlab internal to known hosts - | - echo "Configuring SSH for gitlab internal"; - mkdir -p /home//.ssh; - ssh-keygen -t ed25519 -C 'gitlab-internal' -N '' -f /home//.ssh/id_ed25519; - chown : "/home//.ssh/id_ed25519" - chown : "/home//.ssh/id_ed25519.pub" key=$(cat /home//.ssh/id_ed25519.pub); - curl --header 'Authorization: Bearer ' --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"external-ingress\"}" /api/v4/user/keys; - ssh-keyscan -H >> /home//.ssh/known_hosts; + curl --header 'Authorization: Bearer ' --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"InternalAPIUser\"}" /api/v4/user/keys; + ssh-keyscan -H >> /home//.ssh/known_hosts; 
chown : "/home//.ssh/known_hosts" + # -------------------------------- + # FINAL SETUP + # -------------------------------- + - echo "Finishing setup" # Configure global git user to be gitlab internal user - | - echo "Configuring git global user"; HOME=/home/ git config --global user.name ''; - HOME=/home/ git config --global user.email '@'; + HOME=/home/ git config --global user.email '@'; # Give ownership of their home directory - | chown -R : "/home/"; diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml index 789f868345..0081b29f58 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml @@ -85,10 +85,10 @@ runcmd: gitlab-rails runner "ApplicationSetting.last.update_attributes(domain_whitelist: [''])" # Create user for ingressing external git repos - | - echo "user = User.create(:username => '', :password => '', :password_confirmation => '', :email =>'@', :skip_confirmation => true, :name => '');user.save!;exit;" | gitlab-rails console -e production + echo "user = User.create(:username => '', :password => '', :password_confirmation => '', :email =>'@', :skip_confirmation => true, :name => '');user.save!;exit;" | gitlab-rails console -e production # Create a API token for the ingress user created above - | - echo "user = User.find_by(username: '');user.personal_access_tokens.create(name: 'apitoken', token_digest: Gitlab::CryptoHelper.sha256(''), impersonation: false, scopes: [:api]);exit;" | gitlab-rails console -e production + echo "user = User.find_by(username: '');user.personal_access_tokens.create(name: 'apitoken', token_digest: Gitlab::CryptoHelper.sha256(''), impersonation: false, scopes: [:api]);exit;" | gitlab-rails console -e production # Reload GitLab configuration and 
restart GitLab - gitlab-ctl reconfigure - gitlab-ctl restart diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 2b9f0b78c4..8e4370f4a6 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -25,11 +25,14 @@ $sreAdminPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name $gitlabRootPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabRootPassword $gitlabUserPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabUserPassword $gitlabLdapPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabLdapPassword -$gitlabExternalUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabExternalUsername -DefaultValue "external" +$gitlabExternalUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabExternalUsername -DefaultValue "ingress" $gitlabExternalPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabExternalPassword $gitlabExternalAPIToken = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabExternalAPIToken $hackmdUserPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.hackmdUserPassword $hackmdLdapPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.hackmdLdapPassword +$gitlabInternalUsername = Resolve-KeyVaultSecret -VaultName 
$config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabInternalUsername -DefaultValue "ingress" +$gitlabInternalPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabInternalPassword +$gitlabInternalAPIToken = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabInternalAPIToken # Set up the NSG for the webapps @@ -81,9 +84,9 @@ $gitlabCloudInit = $gitlabCloudInitTemplate.Replace('', $shmDcFq Replace('',$gitlabFqdn). Replace('',$gitlabRootPassword). Replace('',$config.shm.domain.fqdn). - Replace('',$gitlabExternalUsername). - Replace('',$gitlabExternalPassword). - Replace('',$gitlabExternalAPIToken) + Replace('',$gitlabInternalUsername). + Replace('',$gitlabInternalPassword). + Replace('',$gitlabInternalAPIToken) # Encode as base64 $gitlabCloudInitEncoded = [System.Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($gitlabCloudInit)) @@ -186,12 +189,31 @@ $vmNic = Deploy-VirtualMachineNIC -Name "$vmName-NIC" -ResourceGroupName $config # ------------------------------ $bootDiagnosticsAccount = Deploy-StorageAccount -Name $config.sre.storage.bootdiagnostics.accountName -ResourceGroupName $config.sre.storage.bootdiagnostics.rg -Location $config.sre.location + +$shmDcFqdn = ($config.shm.dc.hostname + "." + $config.shm.domain.fqdn) +$gitlabFqdn = $config.sre.webapps.gitlab.external.hostname + "." + $config.sre.domain.fqdn +$gitlabLdapUserDn = "CN=" + $config.sre.users.ldap.gitlab.name + "," + $config.shm.domain.serviceOuPath +$gitlabUserFilter = "(&(objectClass=user)(memberOf=CN=" + $config.sre.domain.securityGroups.reviewUsers.name + "," + $config.shm.domain.securityOuPath + "))" + $gitlabExternalCloudInitTemplate = Join-Path $PSScriptRoot ".." 
"cloud_init" "cloud-init-gitlab-external.template.yaml" | Get-Item | Get-Content -Raw -$gitlabExternalCloudInit = $gitlabExternalCloudInitTemplate.Replace('',$config.sre.webapps.gitlab.internal.ip). - Replace('',$sreAdminUsername). - Replace('',$config.shm.domain.fqdn). - Replace('',$gitlabExternalUsername). - Replace('',$gitlabExternalAPIToken) +$gitlabExternalCloudInit = $gitlabExternalCloudInitTemplate.Replace('',$sreAdminUsername). + Replace('',$config.sre.webapps.gitlab.internal.ip). + Replace('',$config.shm.domain.fqdn). + Replace('',$gitlabInternalUsername). + Replace('',$gitlabInternalAPIToken). + Replace('', $shmDcFqdn). + Replace('', $gitlabLdapUserDn). + Replace('',$gitlabLdapPassword). + Replace('',$config.shm.domain.userOuPath). + Replace('',$gitlabUserFilter). + Replace('',$config.sre.webapps.gitlab.external.ip). + Replace('',$config.sre.webapps.gitlab.external.hostname). + Replace('',$gitlabFqdn). + Replace('',$gitlabRootPassword). + Replace('',$config.shm.domain.fqdn). + Replace('',$gitlabExternalUsername). + Replace('',$gitlabExternalPassword). 
+ Replace('',$gitlabExternalAPIToken) $params = @{ Name = $vmName diff --git a/environment_configs/full/sre_mortestsandbox_full_config.json b/environment_configs/full/sre_mortestsandbox_full_config.json index 4f13e38136..72f6e708e9 100644 --- a/environment_configs/full/sre_mortestsandbox_full_config.json +++ b/environment_configs/full/sre_mortestsandbox_full_config.json @@ -229,11 +229,14 @@ "gitlabLdapPassword": "sre-sandbox-gitlab-ldap-password", "gitlabRootPassword": "sre-sandbox-gitlab-root-password", "gitlabUserPassword": "sre-sandbox-gitlab-user-password", + "gitlabInternalUsername": "sre-sandbox-gitlab-internal-username", + "gitlabInternalPassword": "sre-sandbox-gitlab-internal-password", + "gitlabInternalAPIToken": "sre-sandbox-gitlab-internal-api-token", + "hackmdLdapPassword": "sre-sandbox-hackmd-ldap-password", + "hackmdUserPassword": "sre-sandbox-hackmd-user-password", "gitlabExternalUsername": "sre-sandbox-gitlab-external-username", "gitlabExternalPassword": "sre-sandbox-gitlab-external-password", "gitlabExternalAPIToken": "sre-sandbox-gitlab-external-api-token", - "hackmdLdapPassword": "sre-sandbox-hackmd-ldap-password", - "hackmdUserPassword": "sre-sandbox-hackmd-user-password", "letsEncryptCertificate": "sre-sandbox-lets-encrypt-certificate", "npsSecret": "sre-sandbox-nps-secret", "rdsAdminPassword": "sre-sandbox-rdsvm-admin-password", From b9d5788f809b0da59f62203970dd12bf0bd75a4e Mon Sep 17 00:00:00 2001 From: James Robinson Date: Thu, 30 Apr 2020 13:39:56 +0100 Subject: [PATCH 012/155] Updated SRE teardown script --- deployment/administration/SHM_Teardown.ps1 | 5 -- deployment/common/Configuration.psm1 | 2 +- ...te.ps1 => Create_SRE_Users_And_Groups.ps1} | 50 +++++++++---------- ...te.ps1 => Remove_SRE_Users_And_Groups.ps1} | 24 ++++++--- .../setup/Add_SRE_Data_To_SHM.ps1 | 44 ++++++++-------- .../setup/Remove_SRE_Data_From_SHM.ps1 | 12 +++-- 6 files changed, 72 insertions(+), 65 deletions(-) rename 
deployment/secure_research_environment/remote/configure_shm_dc/scripts/{Create_New_SRE_User_Service_Accounts_Remote.ps1 => Create_SRE_Users_And_Groups.ps1} (98%) rename deployment/secure_research_environment/remote/configure_shm_dc/scripts/{Remove_Users_And_Groups_Remote.ps1 => Remove_SRE_Users_And_Groups.ps1} (86%) diff --git a/deployment/administration/SHM_Teardown.ps1 b/deployment/administration/SHM_Teardown.ps1 index d711ac2023..f5be749fe1 100644 --- a/deployment/administration/SHM_Teardown.ps1 +++ b/deployment/administration/SHM_Teardown.ps1 @@ -71,11 +71,6 @@ $adDnsRecordname = "@" Add-LogMessage -Level Info "[ ] Removing '$adDnsRecordname' TXT record from SHM $shmId DNS zone ($shmDomain)" Remove-AzDnsRecordSet -Name $adDnsRecordname -RecordType TXT -ZoneName $shmDomain -ResourceGroupName $dnsResourceGroup $success = $? -# # RDS ACME record -# $rdsAcmeDnsRecordname = "_acme-challenge" -# Add-LogMessage -Level Info "[ ] Removing '$rdsAcmeDnsRecordname' TXT record from SRE $sreId DNS zone ($shmDomain)" -# Remove-AzDnsRecordSet -Name $rdsAcmeDnsRecordname -RecordType TXT -ZoneName $shmDomain -ResourceGroupName $dnsResourceGroup -# $success = $success -and $? 
# Print success/failure message if ($success) { Add-LogMessage -Level Success "Record removal succeeded" diff --git a/deployment/common/Configuration.psm1 b/deployment/common/Configuration.psm1 index ede35744bf..4ed8e7611c 100644 --- a/deployment/common/Configuration.psm1 +++ b/deployment/common/Configuration.psm1 @@ -439,7 +439,7 @@ function Add-SreConfig { researchers = [ordered]@{ test = [ordered]@{ name = "$($config.sre.domain.netbiosName) Test Researcher" - samAccountName = "testresrch$($sreConfigBase.sreId)".ToLower() | TrimToLength 20 + samAccountName = "researcher$($sreConfigBase.sreId)".ToLower() | TrimToLength 20 } } } diff --git a/deployment/secure_research_environment/remote/configure_shm_dc/scripts/Create_New_SRE_User_Service_Accounts_Remote.ps1 b/deployment/secure_research_environment/remote/configure_shm_dc/scripts/Create_SRE_Users_And_Groups.ps1 similarity index 98% rename from deployment/secure_research_environment/remote/configure_shm_dc/scripts/Create_New_SRE_User_Service_Accounts_Remote.ps1 rename to deployment/secure_research_environment/remote/configure_shm_dc/scripts/Create_SRE_Users_And_Groups.ps1 index 91e181ab59..6812163f24 100644 --- a/deployment/secure_research_environment/remote/configure_shm_dc/scripts/Create_New_SRE_User_Service_Accounts_Remote.ps1 +++ b/deployment/secure_research_environment/remote/configure_shm_dc/scripts/Create_SRE_Users_And_Groups.ps1 @@ -5,33 +5,33 @@ # job, but this does not seem to have an immediate effect # Fror details, see https://docs.microsoft.com/en-gb/azure/virtual-machines/windows/run-command param( - [String]$sreFqdn, - [String]$shmFqdn, - [String]$researchUserSgName, + [String]$dataMountName, + [String]$dataMountPasswordEncrypted, + [String]$dataMountSamAccountName, + [String]$dsvmName, + [String]$dsvmPasswordEncrypted, + [String]$dsvmSamAccountName, + [String]$gitlabName, + [String]$gitlabPasswordEncrypted, + [String]$gitlabSamAccountName, + [String]$hackmdName, + [String]$hackmdPasswordEncrypted, + 
[String]$hackmdSamAccountName, + [String]$ldapUserSgName, + [String]$researchUserOuPath, [String]$researchUserSgDescription, - [String]$reviewUserSgName, + [String]$researchUserSgName, [String]$reviewUserSgDescription, - [String]$sqlAdminSgName, - [String]$sqlAdminSgDescription, - [String]$ldapUserSgName, + [String]$reviewUserSgName, [String]$securityOuPath, [String]$serviceOuPath, - [String]$researchUserOuPath, - [String]$hackmdSamAccountName, - [String]$hackmdName, - [String]$hackmdPasswordEncrypted, - [String]$gitlabSamAccountName, - [String]$gitlabName, - [String]$gitlabPasswordEncrypted, - [String]$dsvmSamAccountName, - [String]$dsvmName, - [String]$dsvmPasswordEncrypted, - [String]$dataMountSamAccountName, - [String]$dataMountName, - [String]$dataMountPasswordEncrypted, - [String]$testResearcherSamAccountName, + [String]$shmFqdn, + [String]$sqlAdminSgDescription, + [String]$sqlAdminSgName, + [String]$sreFqdn, [String]$testResearcherName, - [String]$testResearcherPasswordEncrypted + [String]$testResearcherPasswordEncrypted, + [String]$testResearcherSamAccountName ) function New-SreGroup($name, $description, $path, $groupCategory, $groupScope) { @@ -86,10 +86,10 @@ New-SreGroup -name $reviewUserSgName -description $reviewUserSgDescription -Path New-SreGroup -name $sqlAdminSgName -description $sqlAdminSgDescription -Path $securityOuPath -GroupScope Global -GroupCategory Security # Create Service Accounts for SRE -New-SreUser -samAccountName $hackmdSamAccountName -name $hackmdName -path $serviceOuPath -passwordSecureString $hackmdPasswordSecureString -New-SreUser -samAccountName $gitlabSamAccountName -name $gitlabName -path $serviceOuPath -passwordSecureString $gitlabPasswordSecureString -New-SreUser -samAccountName $dsvmSamAccountName -name $dsvmName -path $serviceOuPath -passwordSecureString $dsvmPasswordSecureString New-SreUser -samAccountName $dataMountSamAccountName -name $dataMountName -path $serviceOuPath -passwordSecureString 
$dataMountPasswordSecureString +New-SreUser -samAccountName $dsvmSamAccountName -name $dsvmName -path $serviceOuPath -passwordSecureString $dsvmPasswordSecureString +New-SreUser -samAccountName $gitlabSamAccountName -name $gitlabName -path $serviceOuPath -passwordSecureString $gitlabPasswordSecureString +New-SreUser -samAccountName $hackmdSamAccountName -name $hackmdName -path $serviceOuPath -passwordSecureString $hackmdPasswordSecureString New-SreUser -samAccountName $testResearcherSamAccountName -name $testResearcherName -path $researchUserOuPath -passwordSecureString $testResearcherPasswordSecureString # Add users to the relevant security groups diff --git a/deployment/secure_research_environment/remote/configure_shm_dc/scripts/Remove_Users_And_Groups_Remote.ps1 b/deployment/secure_research_environment/remote/configure_shm_dc/scripts/Remove_SRE_Users_And_Groups.ps1 similarity index 86% rename from deployment/secure_research_environment/remote/configure_shm_dc/scripts/Remove_Users_And_Groups_Remote.ps1 rename to deployment/secure_research_environment/remote/configure_shm_dc/scripts/Remove_SRE_Users_And_Groups.ps1 index 92e9d9c118..c63d2e7e35 100644 --- a/deployment/secure_research_environment/remote/configure_shm_dc/scripts/Remove_Users_And_Groups_Remote.ps1 +++ b/deployment/secure_research_environment/remote/configure_shm_dc/scripts/Remove_SRE_Users_And_Groups.ps1 @@ -5,16 +5,20 @@ # job, but this does not seem to have an immediate effect # For details, see https://docs.microsoft.com/en-gb/azure/virtual-machines/windows/run-command param( - [String]$sreId, - [String]$testResearcherSamAccountName, + [String]$dataMountSamAccountName, [String]$dsvmLdapSamAccountName, [String]$gitlabLdapSamAccountName, [String]$hackmdLdapSamAccountName, - [String]$sreResearchUserSG, [String]$rdsDataserverVMName, [String]$rdsGatewayVMName, [String]$rdsSessionHostAppsVMName, - [String]$rdsSessionHostDesktopVMName + [String]$rdsSessionHostDesktopVMName, + 
[String]$rdsSessionHostReviewVMName, + [String]$researchUserSgName, + [String]$reviewUserSgName, + [String]$sqlAdminSgName, + [String]$sreId, + [String]$testResearcherSamAccountName ) function Remove-SreUser($samAccountName) { @@ -63,22 +67,26 @@ function Remove-SreGroup($groupName) { } # Remove users -Remove-SreUser $testResearcherSamAccountName +Remove-SreUser $dataMountSamAccountName Remove-SreUser $dsvmLdapSamAccountName Remove-SreUser $gitlabLdapSamAccountName Remove-SreUser $hackmdLdapSamAccountName +Remove-SreUser $testResearcherSamAccountName + +# Remove groups +Remove-SreGroup $researchUserSgName +Remove-SreGroup $reviewUserSgName +Remove-SreGroup $sqlAdminSgName # Remove service computers Remove-SreComputer $rdsDataserverVMName Remove-SreComputer $rdsGatewayVMName Remove-SreComputer $rdsSessionHostAppsVMName Remove-SreComputer $rdsSessionHostDesktopVMName +Remove-SreComputer $rdsSessionHostDesktopVMName # Remove DSVMs $dsvmPrefix = "SRE-$sreId".Replace(".","-").ToUpper() foreach ($dsvm in $(Get-ADComputer -Filter "Name -like '$dsvmPrefix*'")) { Remove-SreComputer $dsvm.Name } - -# Remove groups -Remove-SreGroup $sreResearchUserSG diff --git a/deployment/secure_research_environment/setup/Add_SRE_Data_To_SHM.ps1 b/deployment/secure_research_environment/setup/Add_SRE_Data_To_SHM.ps1 index f10036e939..92d6dd0a02 100644 --- a/deployment/secure_research_environment/setup/Add_SRE_Data_To_SHM.ps1 +++ b/deployment/secure_research_environment/setup/Add_SRE_Data_To_SHM.ps1 @@ -49,35 +49,35 @@ $testResearcherPasswordEncrypted = ConvertTo-SecureString $testResearcherPasswor # ------------------------------- Add-LogMessage -Level Info "[ ] Adding SRE users and groups to SHM..." $_ = Set-AzContext -Subscription $config.shm.subscriptionName -$scriptPath = Join-Path $PSScriptRoot ".." "remote" "configure_shm_dc" "scripts" "Create_New_SRE_User_Service_Accounts_Remote.ps1" +$scriptPath = Join-Path $PSScriptRoot ".." 
"remote" "configure_shm_dc" "scripts" "Create_SRE_Users_And_Groups.ps1" $params = @{ - shmFqdn = "`"$($config.shm.domain.fqdn)`"" - sreFqdn = "`"$($config.sre.domain.fqdn)`"" - researchUserSgName = "`"$($config.sre.domain.securityGroups.researchUsers.name)`"" + dataMountName = "`"$($config.sre.users.datamount.name)`"" + dataMountPasswordEncrypted = $dataMountPasswordEncrypted + dataMountSamAccountName = "`"$($config.sre.users.datamount.samAccountName)`"" + dsvmName = "`"$($config.sre.users.ldap.dsvm.name)`"" + dsvmPasswordEncrypted = $dsvmPasswordEncrypted + dsvmSamAccountName = "`"$($config.sre.users.ldap.dsvm.samAccountName)`"" + gitlabName = "`"$($config.sre.users.ldap.gitlab.name)`"" + gitlabPasswordEncrypted = $gitlabPasswordEncrypted + gitlabSamAccountName = "`"$($config.sre.users.ldap.gitlab.samAccountName)`"" + hackmdName = "`"$($config.sre.users.ldap.hackmd.name)`"" + hackmdPasswordEncrypted = $hackmdPasswordEncrypted + hackmdSamAccountName = "`"$($config.sre.users.ldap.hackmd.samAccountName)`"" + ldapUserSgName = "`"$($config.shm.domain.securityGroups.dsvmLdapUsers.name)`"" + researchUserOuPath = "`"$($config.shm.domain.userOuPath)`"" researchUserSgDescription = "`"$($config.sre.domain.securityGroups.researchUsers.description)`"" - reviewUserSgName = "`"$($config.sre.domain.securityGroups.reviewUsers.name)`"" + researchUserSgName = "`"$($config.sre.domain.securityGroups.researchUsers.name)`"" reviewUserSgDescription = "`"$($config.sre.domain.securityGroups.reviewUsers.description)`"" - sqlAdminSgName = "`"$($config.sre.domain.securityGroups.sqlAdmins.name)`"" - sqlAdminSgDescription = "`"$($config.sre.domain.securityGroups.sqlAdmins.description)`"" - ldapUserSgName = "`"$($config.shm.domain.securityGroups.dsvmLdapUsers.name)`"" + reviewUserSgName = "`"$($config.sre.domain.securityGroups.reviewUsers.name)`"" securityOuPath = "`"$($config.shm.domain.securityOuPath)`"" serviceOuPath = "`"$($config.shm.domain.serviceOuPath)`"" - researchUserOuPath = 
"`"$($config.shm.domain.userOuPath)`"" - hackmdSamAccountName = "`"$($config.sre.users.ldap.hackmd.samAccountName)`"" - hackmdName = "`"$($config.sre.users.ldap.hackmd.name)`"" - hackmdPasswordEncrypted = $hackmdPasswordEncrypted - gitlabSamAccountName = "`"$($config.sre.users.ldap.gitlab.samAccountName)`"" - gitlabName = "`"$($config.sre.users.ldap.gitlab.name)`"" - gitlabPasswordEncrypted = $gitlabPasswordEncrypted - dsvmSamAccountName = "`"$($config.sre.users.ldap.dsvm.samAccountName)`"" - dsvmName = "`"$($config.sre.users.ldap.dsvm.name)`"" - dsvmPasswordEncrypted = $dsvmPasswordEncrypted - dataMountSamAccountName = "`"$($config.sre.users.datamount.samAccountName)`"" - dataMountName = "`"$($config.sre.users.datamount.name)`"" - dataMountPasswordEncrypted = $dataMountPasswordEncrypted - testResearcherSamAccountName = "`"$($config.sre.users.researchers.test.samAccountName)`"" + shmFqdn = "`"$($config.shm.domain.fqdn)`"" + sqlAdminSgDescription = "`"$($config.sre.domain.securityGroups.sqlAdmins.description)`"" + sqlAdminSgName = "`"$($config.sre.domain.securityGroups.sqlAdmins.name)`"" + sreFqdn = "`"$($config.sre.domain.fqdn)`"" testResearcherName = "`"$($config.sre.users.researchers.test.name)`"" testResearcherPasswordEncrypted = $testResearcherPasswordEncrypted + testResearcherSamAccountName = "`"$($config.sre.users.researchers.test.samAccountName)`"" } $result = Invoke-RemoteScript -Shell "PowerShell" -ScriptPath $scriptPath -VMName $config.shm.dc.vmName -ResourceGroupName $config.shm.dc.rg -Parameter $params Write-Output $result.Value diff --git a/deployment/secure_research_environment/setup/Remove_SRE_Data_From_SHM.ps1 b/deployment/secure_research_environment/setup/Remove_SRE_Data_From_SHM.ps1 index 7e92a1e9c4..19dd227200 100644 --- a/deployment/secure_research_environment/setup/Remove_SRE_Data_From_SHM.ps1 +++ b/deployment/secure_research_environment/setup/Remove_SRE_Data_From_SHM.ps1 @@ -72,18 +72,22 @@ if ($sreResources -or $sreResourceGroups) { # Remove 
SRE users and groups from SHM DC # --------------------------------------- Add-LogMessage -Level Info "Removing SRE users and groups from SHM DC..." - $scriptPath = Join-Path $PSScriptRoot ".." "remote" "configure_shm_dc" "scripts" "Remove_Users_And_Groups_Remote.ps1" -Resolve + $scriptPath = Join-Path $PSScriptRoot ".." "remote" "configure_shm_dc" "scripts" "Remove_SRE_Users_And_Groups.ps1" -Resolve $params = @{ - sreId = "`"$($config.sre.id)`"" - testResearcherSamAccountName = "`"$($config.sre.users.researchers.test.samAccountName)`"" + dataMountSamAccountName = "`"$($config.sre.users.datamount.samAccountName)`"" dsvmLdapSamAccountName = "`"$($config.sre.users.ldap.dsvm.samAccountName)`"" gitlabLdapSamAccountName = "`"$($config.sre.users.ldap.gitlab.samAccountName)`"" hackmdLdapSamAccountName = "`"$($config.sre.users.ldap.hackmd.samAccountName)`"" - sreResearchUserSG = "`"$($config.sre.domain.securityGroups.researchUsers.name)`"" rdsDataserverVMName = "`"$($config.sre.dataserver.vmName)`"" rdsGatewayVMName = "`"$($config.sre.rds.gateway.vmName)`"" rdsSessionHostAppsVMName = "`"$($config.sre.rds.sessionHost1.vmName)`"" rdsSessionHostDesktopVMName = "`"$($config.sre.rds.sessionHost2.vmName)`"" + rdsSessionHostReviewVMName = "`"$($config.sre.rds.sessionHost3.vmName)`"" + researchUserSgName = "`"$($config.sre.domain.securityGroups.researchUsers.name)`"" + reviewUserSgName = "`"$($config.sre.domain.securityGroups.reviewUsers.name)`"" + sqlAdminSgName = "`"$($config.sre.domain.securityGroups.sqlAdmins.name)`"" + sreId = "`"$($config.sre.id)`"" + testResearcherSamAccountName = "`"$($config.sre.users.researchers.test.samAccountName)`"" } $result = Invoke-RemoteScript -Shell "PowerShell" -ScriptPath $scriptPath -VMName $config.shm.dc.vmName -ResourceGroupName $config.shm.dc.rg -Parameter $params Write-Output $result.Value From f8c415a6c467fcc06a5edd984ac81553d3bd4f90 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Thu, 30 Apr 2020 16:05:03 +0100 Subject: [PATCH 
013/155] Removed cw20 configs as we are using testa now --- .../core/sre_testasandbox_core_config.json | 2 +- .../full/sre_testasandbox_full_config.json | 50 ++++++++++++++++--- 2 files changed, 44 insertions(+), 8 deletions(-) diff --git a/environment_configs/core/sre_testasandbox_core_config.json b/environment_configs/core/sre_testasandbox_core_config.json index 975e7fd9a8..a1babd179e 100644 --- a/environment_configs/core/sre_testasandbox_core_config.json +++ b/environment_configs/core/sre_testasandbox_core_config.json @@ -7,7 +7,7 @@ "domain": "sandbox.testa.dsgroupdev.co.uk", "netbiosName": "SANDBOX", "ipPrefix": "10.150.0", - "rdsAllowedSources": "default", + "rdsAllowedSources": "Internet", "rdsInternetAccess": "default", "computeVmImageType": "Ubuntu", "computeVmImageVersion": "0.2.2020050300" diff --git a/environment_configs/full/sre_testasandbox_full_config.json b/environment_configs/full/sre_testasandbox_full_config.json index 0a3445dc41..d1050c5b92 100644 --- a/environment_configs/full/sre_testasandbox_full_config.json +++ b/environment_configs/full/sre_testasandbox_full_config.json @@ -152,6 +152,10 @@ "researchUsers": { "name": "SG SANDBOX Research Users", "description": "SG SANDBOX Research Users" + }, + "reviewUsers": { + "name": "SG SANDBOX Review Users", + "description": "SG SANDBOX Review Users" } } }, @@ -182,12 +186,21 @@ "prefix": "10.150.3", "nsg": "dbingress", "cidr": "10.150.3.0/24" + }, + "airlock": { + "name": "AirlockSubnet", + "prefix": "10.150.4", + "nsg": "airlock", + "cidr": "10.150.4.0/24" } }, "nsg": { "data": {}, "dbingress": { "name": "NSG_SRE_SANDBOX_DB_INGRESS" + }, + "airlock": { + "name": "NSG_SRE_SANDBOX_AIRLOCK" } } }, @@ -216,8 +229,14 @@ "gitlabLdapPassword": "sre-sandbox-gitlab-ldap-password", "gitlabRootPassword": "sre-sandbox-gitlab-root-password", "gitlabUserPassword": "sre-sandbox-gitlab-user-password", + "gitlabInternalUsername": "sre-sandbox-gitlab-internal-username", + "gitlabInternalPassword": 
"sre-sandbox-gitlab-internal-password", + "gitlabInternalAPIToken": "sre-sandbox-gitlab-internal-api-token", "hackmdLdapPassword": "sre-sandbox-hackmd-ldap-password", "hackmdUserPassword": "sre-sandbox-hackmd-user-password", + "gitlabExternalUsername": "sre-sandbox-gitlab-external-username", + "gitlabExternalPassword": "sre-sandbox-gitlab-external-password", + "gitlabExternalAPIToken": "sre-sandbox-gitlab-external-api-token", "letsEncryptCertificate": "sre-sandbox-lets-encrypt-certificate", "npsSecret": "sre-sandbox-nps-secret", "rdsAdminPassword": "sre-sandbox-rdsvm-admin-password", @@ -249,7 +268,7 @@ "researchers": { "test": { "name": "SANDBOX Test Researcher", - "samAccountName": "testresrchsandbox" + "samAccountName": "researchersandbox" } } }, @@ -260,7 +279,7 @@ "vmSize": "Standard_DS2_v2", "nsg": "NSG_SRE_SANDBOX_RDS_SERVER", "networkRules": { - "allowedSources": "193.60.220.253", + "allowedSources": "Internet", "outboundInternet": "Deny" }, "hostname": "RDG-SRE-SANDBOX", @@ -282,6 +301,14 @@ "hostname": "DKP-SRE-SANDBOX", "fqdn": "DKP-SRE-SANDBOX.testa.dsgroupdev.co.uk", "ip": "10.150.1.248" + }, + "sessionHost3": { + "vmName": "REV-SRE-SANDBOX", + "vmSize": "Standard_DS2_v2", + "nsg": "NSG_SRE_SANDBOX_RDS_SESSION_HOSTS", + "hostname": "REV-SRE-SANDBOX", + "fqdn": "REV-SRE-SANDBOX.testa.dsgroupdev.co.uk", + "ip": "10.150.1.247" } }, "dataserver": { @@ -300,11 +327,20 @@ "rg": "RG_SRE_WEBAPPS", "nsg": "NSG_SRE_SANDBOX_WEBAPPS", "gitlab": { - "vmName": "GITLAB-SRE-SANDBOX", - "vmSize": "Standard_D2s_v3", - "hostname": "GITLAB-SRE-SANDBOX", - "fqdn": "GITLAB-SRE-SANDBOX.testa.dsgroupdev.co.uk", - "ip": "10.150.2.151" + "internal": { + "vmName": "GITLAB-INTERNAL-SRE-SANDBOX", + "vmSize": "Standard_D2s_v3", + "hostname": "GITLAB-INTERNAL-SRE-SANDBOX", + "fqdn": "GITLAB-INTERNAL-SRE-SANDBOX.testa.dsgroupdev.co.uk", + "ip": "10.150.2.151" + }, + "external": { + "vmName": "GITLAB-EXTERNAL-SRE-SANDBOX", + "vmSize": "Standard_D2s_v3", + "hostname": 
"GITLAB-EXTERNAL-SRE-SANDBOX", + "fqdn": "GITLAB-EXTERNAL-SRE-SANDBOX.testa.dsgroupdev.co.uk", + "ip": "10.150.4.151" + } }, "hackmd": { "vmName": "HACKMD-SRE-SANDBOX", From ccdc93f685d41a01b32b90a1203977e27f884b60 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Fri, 1 May 2020 18:23:19 +0100 Subject: [PATCH 014/155] WIP script for monitoring and accepting merge requests on gitlab external Also add creation of "approved" and "unapproved" groups on gitlab external --- .../cloud-init-gitlab-external.template.yaml | 150 +++++++++++++++++- 1 file changed, 148 insertions(+), 2 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index 28306e04b0..bd929aaaad 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -25,6 +25,7 @@ apt: keyid: 3F01618A51312F3F write_files: + # Gitlab server config - path: /etc/gitlab/gitlab.rb permissions: "0600" content: | @@ -52,6 +53,7 @@ write_files: last_name: 'sn' EOS git_data_dirs({ "default" => { "path" => "/datadrive/gitdata" } }) + # Secrets for Gitlab Internal and External Access - path: "/home//.secrets/gitlab-internal-api-token" permissions: "0600" content: | @@ -84,6 +86,7 @@ write_files: permissions: "0600" content: | @ + # Script for ingressing repos to gitlab internal - path: "/home//update_from_whitelist.py" permissions: "0755" content: | @@ -143,7 +146,146 @@ write_files: os.system("git push gitlab-internal " + repo["gitlab_branch"]) - os.chdir("..") + os.chdir("..") + # Script for monitoring and accepting approved merge requests + - path: "/home//check_merge_requests.py" + permissions: "0755" + content: | + from datetime import datetime + import requests + from pathlib import Path + + def get_request(endpoint, headers, 
params=None): + if params is not None: + r = requests.get(endpoint, headers=headers, params=params) + else: + r = requests.get(endpoint, headers=headers) + if r.ok: + return r.json() + else: + raise ValueError(f"Request failed: code {r.status_code}, content {r.content}") + + def put_request(endpoint, headers, params=None): + if params is not None: + r = requests.put(endpoint, headers=headers, params=params) + else: + r = requests.put(endpoint, headers=headers) + if r.ok: + return r.json() + else: + raise ValueError(f"Request failed: code {r.status_code}, content {r.content}") + + def get_gitlab_config(server="external"): + home = str(Path.home()) + if server == "external": + with open(f"{home}/.secrets/gitlab-external-ip-address", "r") as f: + ip = f.readlines()[0].strip() + with open(f"{home}/.secrets/gitlab-external-api-token", "r") as f: + token = f.readlines()[0].strip() + elif server == "internal": + with open(f"{home}/.secrets/gitlab-internal-ip-address", "r") as f: + ip = f.readlines()[0].strip() + with open(f"{home}/.secrets/gitlab-internal-api-token", "r") as f: + token = f.readlines()[0].strip() + else: + raise ValueError("Server must be external or internal") + api_url = f"http://{ip}/api/v4/" + headers = {"Authorization": "Bearer " + token} + return {"api_url": api_url, "api_token": token, "ip": ip, "headers": headers} + + def get_group_id(group_name, config): + endpoint = config["api_url"] + "groups" + response = get_request(endpoint, headers=config["headers"]) + for group in response: + if group["name"] == group_name: + return group["id"] + raise ValueError(f"{group_name} not found in groups.") + + def get_project_name(project_id, config): + endpoint = config["api_url"] + f"projects/{project_id}" + response = get_request(endpoint, headers=config["headers"]) + return response["name"] + + def get_merge_requests_for_approval(config): + group = get_group_id("approved", config) + endpoint = config["api_url"] + f"/groups/{group}/merge_requests" + response = 
get_request( + endpoint, headers=config["headers"], params={"state": "opened"} + ) + return response + + def count_unresolved_mr_discussions(mr, config): + if mr["user_notes_count"] == 0: + return 0 + project_id = mr["project_id"] + mr_iid = mr["iid"] + endpoint = ( + config["api_url"] + f"projects/{project_id}/merge_requests/{mr_iid}/discussions" + ) + discussions = get_request(endpoint, headers=config["headers"]) + if len(discussions) == 0: + return 0 + else: + n_unresolved = 0 + for d in discussions: + for n in d["notes"]: + if n["resolvable"] is True and n["resolved"] is False: + n_unresolved += 1 + return n_unresolved + + def accept_merge_request(mr, config): + project_id = mr["project_id"] + mr_iid = mr["iid"] + endpoint = ( + config["api_url"] + f"projects/{project_id}/merge_requests/{mr_iid}/merge" + ) + result = put_request(endpoint, headers=config["headers"]) + return result + + def check_merge_requests(): + print("Starting run at", datetime.now().strftime("%d/%m/%Y %H:%M:%S")) + config = get_gitlab_config(server="external") + print("Getting open merge requests for approval") + merge_requests = get_merge_requests_for_approval(config) + print("Found", len(merge_requests), "open merge requests") + for i, mr in enumerate(merge_requests): + print("-" * 20) + print("Merge request", i, "out of", len(merge_requests)) + print("-" * 20) + print("Project:", get_project_name(mr["project_id"], config)) + print("Source Branch:", mr["source_branch"]) + print("Target Branch:", mr["target_branch"]) + status = mr["merge_status"] + print("Merge Status:", status) + wip = mr["work_in_progress"] + print("Work in Progress:", wip) + unresolved = count_unresolved_mr_discussions(mr, config) + print("Unresolved Discussions:", unresolved) + upvotes = mr["upvotes"] + print("Upvotes:", upvotes) + downvotes = mr["downvotes"] + print("Downvotes:", downvotes) + if ( + status == "can_be_merged" + and wip is False + and unresolved == 0 + and upvotes >= 2 + and downvotes == 0 + ): + 
print("Merge request has been approved. Proceeding with merge.") + result = accept_merge_request(mr, config) + if result["state"] == "merged": + print("Merge successful!") + # TODO CALL OLIVER'S FUNCTION + else: + print("Merge failed!") + else: + print("Merge request has not been approved. Skipping.") + print("-" * 20) + print("Run finished at", datetime.now().strftime("%d/%m/%Y %H:%M:%S")) + + if __name__ == "__main__": + check_merge_requests() runcmd: # -------------------------------- @@ -206,12 +348,16 @@ runcmd: chown : "/home//.secrets/gitlab-external-ip-address"; chown : "/home//.secrets/gitlab-external-username"; chown : "/home//.secrets/gitlab-external-user-email"; - # Create SSH key for gitlab internal access, add gitlab internal to known hosts + # Create SSH key for gitlab external access, add gitlab external to known hosts - | key=$(cat /home//.ssh/id_ed25519.pub); curl --header 'Authorization: Bearer ' --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"ExternalAPIUser\"}" /api/v4/user/keys; ssh-keyscan -H >> /home//.ssh/known_hosts; chown : "/home//.ssh/known_hosts" + # Create groups for storing unapproved and approved repos + - | + curl --header "Authorization: Bearer " --data "name=approved&path=approved&visibility=public" /api/v4/groups; + curl --header "Authorization: Bearer " --data "name=unapproved&path=unapproved&visibility=public" /api/v4/groups # -------------------------------- # SETUP ACCESS TO GITLAB INTERNAL # -------------------------------- From fd62e1f256744c05cc8f190ac651b8dc2b857880 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Mon, 4 May 2020 10:44:12 +0100 Subject: [PATCH 015/155] Merge requests script: Replace print statements with logging --- .../cloud-init-gitlab-external.template.yaml | 168 +++++++++++++----- 1 file changed, 119 insertions(+), 49 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml 
b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index bd929aaaad..f9eef1a828 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -151,37 +151,60 @@ write_files: - path: "/home//check_merge_requests.py" permissions: "0755" content: | - from datetime import datetime import requests from pathlib import Path + import logging + from logging.handlers import RotatingFileHandler + + + logger = logging.getLogger("merge_requests_logger") + logger.setLevel(logging.INFO) + formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s") + f_handler = RotatingFileHandler( + "check_merge_requests.log", maxBytes=5 * 1024 * 1024, backupCount=10 + ) + f_handler.setFormatter(formatter) + c_handler = logging.StreamHandler() + c_handler.setFormatter(formatter) + logger.addHandler(f_handler) + logger.addHandler(c_handler) + + + def push_to_internal(project_url, project_name, target_branch, commit_sha): + # TODO Replace with Oliver's function + logger.info(f"Pushing {project_name} to gitlab internal") + def get_request(endpoint, headers, params=None): - if params is not None: - r = requests.get(endpoint, headers=headers, params=params) - else: - r = requests.get(endpoint, headers=headers) + r = requests.get(endpoint, headers=headers, params=params) if r.ok: return r.json() else: - raise ValueError(f"Request failed: code {r.status_code}, content {r.content}") + raise ValueError( + f"Request failed: URL {endpoint}, CODE {r.status_code}, CONTENT {r.content}" + ) + def put_request(endpoint, headers, params=None): - if params is not None: - r = requests.put(endpoint, headers=headers, params=params) - else: - r = requests.put(endpoint, headers=headers) + r = requests.put(endpoint, headers=headers, params=params) if r.ok: return r.json() else: - raise ValueError(f"Request failed: code 
{r.status_code}, content {r.content}") + raise ValueError( + f"Request failed: URL {endpoint}, CODE {r.status_code}, CONTENT {r.content}" + ) + def get_gitlab_config(server="external"): home = str(Path.home()) + if server == "external": - with open(f"{home}/.secrets/gitlab-external-ip-address", "r") as f: - ip = f.readlines()[0].strip() - with open(f"{home}/.secrets/gitlab-external-api-token", "r") as f: - token = f.readlines()[0].strip() + ip = "0.0.0.0" + token = "YuHW8ytg2RnzzmSMZyfc" + # with open(f"{home}/.secrets/gitlab-external-ip-address", "r") as f: + # ip = f.readlines()[0].strip() + # with open(f"{home}/.secrets/gitlab-external-api-token", "r") as f: + # token = f.readlines()[0].strip() elif server == "internal": with open(f"{home}/.secrets/gitlab-internal-ip-address", "r") as f: ip = f.readlines()[0].strip() @@ -189,10 +212,13 @@ write_files: token = f.readlines()[0].strip() else: raise ValueError("Server must be external or internal") + api_url = f"http://{ip}/api/v4/" headers = {"Authorization": "Bearer " + token} + return {"api_url": api_url, "api_token": token, "ip": ip, "headers": headers} + def get_group_id(group_name, config): endpoint = config["api_url"] + "groups" response = get_request(endpoint, headers=config["headers"]) @@ -201,10 +227,12 @@ write_files: return group["id"] raise ValueError(f"{group_name} not found in groups.") - def get_project_name(project_id, config): + + def get_project(project_id, config): endpoint = config["api_url"] + f"projects/{project_id}" - response = get_request(endpoint, headers=config["headers"]) - return response["name"] + project = get_request(endpoint, headers=config["headers"]) + return project + def get_merge_requests_for_approval(config): group = get_group_id("approved", config) @@ -214,6 +242,7 @@ write_files: ) return response + def count_unresolved_mr_discussions(mr, config): if mr["user_notes_count"] == 0: return 0 @@ -233,38 +262,58 @@ write_files: n_unresolved += 1 return n_unresolved + def 
accept_merge_request(mr, config): project_id = mr["project_id"] mr_iid = mr["iid"] endpoint = ( config["api_url"] + f"projects/{project_id}/merge_requests/{mr_iid}/merge" ) - result = put_request(endpoint, headers=config["headers"]) - return result + return put_request(endpoint, headers=config["headers"]) + def check_merge_requests(): - print("Starting run at", datetime.now().strftime("%d/%m/%Y %H:%M:%S")) - config = get_gitlab_config(server="external") - print("Getting open merge requests for approval") - merge_requests = get_merge_requests_for_approval(config) - print("Found", len(merge_requests), "open merge requests") + logger.info(f"STARTING RUN") + + try: + config = get_gitlab_config(server="external") + except Exception as e: + logger.critical(f"Failed to load gitlab secrets: {e}") + return + + logger.info("Getting open merge requests for approval") + try: + merge_requests = get_merge_requests_for_approval(config) + except Exception as e: + logger.critical(f"Failed to get merge requests: {e}") + return + logger.info(f"Found {len(merge_requests)} open merge requests") + for i, mr in enumerate(merge_requests): - print("-" * 20) - print("Merge request", i, "out of", len(merge_requests)) - print("-" * 20) - print("Project:", get_project_name(mr["project_id"], config)) - print("Source Branch:", mr["source_branch"]) - print("Target Branch:", mr["target_branch"]) - status = mr["merge_status"] - print("Merge Status:", status) - wip = mr["work_in_progress"] - print("Work in Progress:", wip) - unresolved = count_unresolved_mr_discussions(mr, config) - print("Unresolved Discussions:", unresolved) - upvotes = mr["upvotes"] - print("Upvotes:", upvotes) - downvotes = mr["downvotes"] - print("Downvotes:", downvotes) + logger.info("-" * 20) + logger.info(f"Merge request {i+1} out of {len(merge_requests)}") + try: + source_project = get_project(mr["source_project_id"], config) + logger.info(f"Source Project: {source_project['name_with_namespace']}") + logger.info(f"Source 
Branch: {mr['source_branch']}") + target_project = get_project(mr["project_id"], config) + logger.info(f"Target Project: {target_project['name_with_namespace']}") + logger.info(f"Target Branch: {mr['target_branch']}") + logger.info(f"Commit SHA: {mr['sha']}") + logger.info(f"Created At: {mr['created_at']}") + status = mr["merge_status"] + logger.info(f"Merge Status: {status}") + wip = mr["work_in_progress"] + logger.info(f"Work in Progress: {wip}") + unresolved = count_unresolved_mr_discussions(mr, config) + logger.info(f"Unresolved Discussions: {unresolved}") + upvotes = mr["upvotes"] + logger.info(f"Upvotes: {upvotes}") + downvotes = mr["downvotes"] + logger.info(f"Downvotes: {downvotes}") + except Exception as e: + logger.error(f"Failed to extract merge request details: {e}") + continue if ( status == "can_be_merged" and wip is False @@ -272,20 +321,41 @@ write_files: and upvotes >= 2 and downvotes == 0 ): - print("Merge request has been approved. Proceeding with merge.") - result = accept_merge_request(mr, config) + logger.info("Merge request has been approved. Proceeding with merge.") + try: + result = accept_merge_request(mr, config) + except Exception as e: + logger.error(f"Merge failed! {e}") + continue if result["state"] == "merged": - print("Merge successful!") - # TODO CALL OLIVER'S FUNCTION + logger.info(f"Merge successful! 
Merge SHA {result['merge_commit_sha']}") + try: + with open("accepted_merge_requests.log", "a") as f: + f.write( + f"{result['merged_at']}, {source_project['name_with_namespace']}, {mr['source_branch']}, {mr['sha']}, {target_project['name_with_namespace']}, {mr['target_branch']}, {result['merge_commit_sha']}\n" + ) + except Exception as e: + logger.error(f"Failed to log accepted merge request: {e}") + try: + push_to_internal( + target_project["ssh_url_to_repo"], + target_project["name"], + mr["target_branch"], + result["merge_commit_sha"], + ) + except Exception as e: + logger.error(f"Failed to push to internal: {e}") else: - print("Merge failed!") + logger.error(f"Merge failed! Merge status is {result['state']}") else: - print("Merge request has not been approved. Skipping.") - print("-" * 20) - print("Run finished at", datetime.now().strftime("%d/%m/%Y %H:%M:%S")) + logger.info("Merge request has not been approved. Skipping.") + logger.info(f"RUN FINISHED") + logger.info("=" * 30) + if __name__ == "__main__": check_merge_requests() + runcmd: # -------------------------------- From f139b2f57049b5f75a37eb9dd5ad4ff7151e92d2 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Mon, 4 May 2020 10:47:33 +0100 Subject: [PATCH 016/155] Remove localhost token accidentally added in previous commit For local gitlab testing on my laptop only - not a secret valid for anything in safe haven. 
--- .../cloud-init-gitlab-external.template.yaml | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index f9eef1a828..cb7d4a3e12 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -199,12 +199,10 @@ write_files: home = str(Path.home()) if server == "external": - ip = "0.0.0.0" - token = "YuHW8ytg2RnzzmSMZyfc" - # with open(f"{home}/.secrets/gitlab-external-ip-address", "r") as f: - # ip = f.readlines()[0].strip() - # with open(f"{home}/.secrets/gitlab-external-api-token", "r") as f: - # token = f.readlines()[0].strip() + with open(f"{home}/.secrets/gitlab-external-ip-address", "r") as f: + ip = f.readlines()[0].strip() + with open(f"{home}/.secrets/gitlab-external-api-token", "r") as f: + token = f.readlines()[0].strip() elif server == "internal": with open(f"{home}/.secrets/gitlab-internal-ip-address", "r") as f: ip = f.readlines()[0].strip() From 69df12d18c2891c64960c74d606412c6784149b0 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Thu, 14 May 2020 12:32:53 +0100 Subject: [PATCH 017/155] Changed name of test researcher --- docs/deploy_shm_instructions.md | 4 +++- environment_configs/full/sre_testasandbox_full_config.json | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/deploy_shm_instructions.md b/docs/deploy_shm_instructions.md index 220dc81aba..306091536e 100644 --- a/docs/deploy_shm_instructions.md +++ b/docs/deploy_shm_instructions.md @@ -112,7 +112,9 @@ The following core SHM properties must be defined in a JSON file named `shm_`, where `` is the `Tenant ID` you copied from the AAD portal `Overview` page in the previous step. 
:warning: If you do not do this before running the next script, you will have to exit Powershell and start it again. + - **IMPORTANT** Ensure you are authenticated to the correct AAD within PowerShell using the command: `Connect-AzureAD -TenantId `, where `` is the `Tenant ID` you copied from the AAD portal `Overview` page in the previous step. + - :warning: If you do not do this before running the next script, you will have to exit Powershell and start it again. + - **Troubleshooting** If you get an error like `Connect-AzureAD: Could not load file or assembly 'Microsoft.IdentityModel.Clients.ActiveDirectory, Version=3.19.8.16603, Culture=neutral, PublicKeyToken=31bf3856ad364e35'. Could not find or load a specific file. (0x80131621)` then please open a new Powershell session and try again - Run `./Setup_SHM_AAD_Domain.ps1 -shmId `, where the SHM ID is the one specified in the config. - :warning: Due to delays with DNS propagation, occasionally the script may exhaust the maximum number of retries without managing to verify the domain. If this occurs, run the script again. If it exhausts the number of retries a second time, wait an hour and try again. 
diff --git a/environment_configs/full/sre_testasandbox_full_config.json b/environment_configs/full/sre_testasandbox_full_config.json index d1050c5b92..33afaa66cb 100644 --- a/environment_configs/full/sre_testasandbox_full_config.json +++ b/environment_configs/full/sre_testasandbox_full_config.json @@ -268,7 +268,7 @@ "researchers": { "test": { "name": "SANDBOX Test Researcher", - "samAccountName": "researchersandbox" + "samAccountName": "testresrchsandbox" } } }, From 49df9191b13f8ad661f98e6a39dad37bdab8f127 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Fri, 1 May 2020 20:58:49 +0100 Subject: [PATCH 018/155] Updated RDS scripts --- .../scripts/Import_And_Install_Blobs.ps1 | 71 +++++++ .../create_rds/scripts/Import_Artifacts.ps1 | 51 ----- .../Install_Additional_Powershell_Modules.ps1 | 17 -- .../create_rds/scripts/Install_Packages.ps1 | 22 --- .../scripts/Set_OS_Locale_and_DNS.ps1 | 27 --- .../Deploy_RDS_Environment.template.ps1 | 5 +- .../setup/Setup_SRE_VNET_RDS.ps1 | 185 ++++++------------ .../full/sre_testasandbox_full_config.json | 2 +- 8 files changed, 133 insertions(+), 247 deletions(-) create mode 100644 deployment/secure_research_environment/remote/create_rds/scripts/Import_And_Install_Blobs.ps1 delete mode 100644 deployment/secure_research_environment/remote/create_rds/scripts/Import_Artifacts.ps1 delete mode 100644 deployment/secure_research_environment/remote/create_rds/scripts/Install_Additional_Powershell_Modules.ps1 delete mode 100644 deployment/secure_research_environment/remote/create_rds/scripts/Install_Packages.ps1 delete mode 100644 deployment/secure_research_environment/remote/create_rds/scripts/Set_OS_Locale_and_DNS.ps1 diff --git a/deployment/secure_research_environment/remote/create_rds/scripts/Import_And_Install_Blobs.ps1 b/deployment/secure_research_environment/remote/create_rds/scripts/Import_And_Install_Blobs.ps1 new file mode 100644 index 0000000000..8fceb4a427 --- /dev/null +++ 
b/deployment/secure_research_environment/remote/create_rds/scripts/Import_And_Install_Blobs.ps1 @@ -0,0 +1,71 @@ +# Don't make parameters mandatory as if there is any issue binding them, the script will prompt for them +# and remote execution will stall waiting for the non-present user to enter the missing parameter on the +# command line. This take up to 90 minutes to timeout, though you can try running resetState.cmd in +# C:\Packages\Plugins\Microsoft.CPlat.Core.RunCommandWindows\1.1.0 on the remote VM to cancel a stalled +# job, but this does not seem to have an immediate effect +# For details, see https://docs.microsoft.com/en-gb/azure/virtual-machines/windows/run-command +param( + [Parameter(HelpMessage = "Storage account name")] + [string]$storageAccountName, + [Parameter(HelpMessage = "Storage service")] + [string]$storageService, + [Parameter(HelpMessage = "File share or blob container name")] + [string]$shareOrContainerName, + [Parameter(HelpMessage = "SAS token with read/list rights to the artifacts storage blob container")] + [string]$sasToken, + [Parameter(HelpMessage = "Pipe separated list of remote file paths")] + [string]$pipeSeparatedRemoteFilePaths, + [Parameter(HelpMessage = "Absolute path to artifacts download directory")] + [string]$downloadDir +) + +# Deserialise blob names +$remoteFilePaths = $pipeSeparatedRemoteFilePaths.Split("|") + +# Clear any previously downloaded artifacts +Write-Host "Clearing all pre-existing files and folders from '$downloadDir'" +if (Test-Path -Path $downloadDir) { + Get-ChildItem $downloadDir -Recurse | Remove-Item -Recurse -Force +} else { + $_ = New-Item -ItemType directory -Path $downloadDir +} + +# Download artifacts +Write-Host "Downloading $($remoteFilePaths.Count) files to '$downloadDir'" +foreach ($remoteFilePath in $remoteFilePaths) { + # Ensure that local path exists + $localDir = Join-Path $downloadDir $(Split-Path -Parent $remoteFilePath) + if (-Not (Test-Path -Path $localDir)) { + $_ = New-Item 
-ItemType directory -Path $localDir + } + $fileName = $(Split-Path -Leaf $remoteFilePath) + $localFilePath = Join-Path $localDir $fileName + + # Get file from blob storage + $remoteUrl = "https://${storageAccountName}.${storageService}.core.windows.net/${shareOrContainerName}/${remoteFilePath}" + Write-Host " [ ] Fetching $remoteUrl..." + $_ = Invoke-WebRequest -Uri "${remoteUrl}${sasToken}" -OutFile $localFilePath + if ($?) { + Write-Host " [o] Succeeded" + } else { + Write-Host " [x] Failed!" + } + + # If this file is an msi/exe then install it + if ((Test-Path -Path $localFilePath) -And ($fileName -Match ".*\.(msi|exe)\b")) { + if ($fileName -like "*.msi") { + Write-Host " [ ] Installing $fileName..." + Start-Process $localFilePath -ArgumentList '/quiet' -Verbose -Wait + } elseif ($fileName -like "*WinSCP*exe") { + Write-Host " [ ] Installing $fileName..." + Start-Process $localFilePath -ArgumentList '/SILENT', '/ALLUSERS' -Verbose -Wait + } else { + continue + } + if ($?) { + Write-Host " [o] Succeeded" + } else { + Write-Host " [x] Failed!" + } + } +} diff --git a/deployment/secure_research_environment/remote/create_rds/scripts/Import_Artifacts.ps1 b/deployment/secure_research_environment/remote/create_rds/scripts/Import_Artifacts.ps1 deleted file mode 100644 index e7d403cc00..0000000000 --- a/deployment/secure_research_environment/remote/create_rds/scripts/Import_Artifacts.ps1 +++ /dev/null @@ -1,51 +0,0 @@ -# Don't make parameters mandatory as if there is any issue binding them, the script will prompt for them -# and remote execution will stall waiting for the non-present user to enter the missing parameter on the -# command line. 
This take up to 90 minutes to timeout, though you can try running resetState.cmd in -# C:\Packages\Plugins\Microsoft.CPlat.Core.RunCommandWindows\1.1.0 on the remote VM to cancel a stalled -# job, but this does not seem to have an immediate effect -# For details, see https://docs.microsoft.com/en-gb/azure/virtual-machines/windows/run-command -param( - [Parameter(Position=0, HelpMessage = "Storage account name")] - [string]$storageAccountName, - [Parameter(Position=1, HelpMessage = "Storage service")] - [string]$storageService, - [Parameter(Position=2, HelpMessage = "File share or blob container name")] - [string]$shareOrContainerName, - [Parameter(Position=3, HelpMessage = "SAS token with read/list rights to the artifacts storage blob container")] - [string]$sasToken, - [Parameter(Position=4, HelpMessage = "Pipe separated list of remote file paths")] - [string]$pipeSeparatedremoteFilePaths, - [Parameter(Position=5, HelpMessage = "Absolute path to artifacts download directory")] - [string]$downloadDir -) - -# Deserialise blob names -$remoteFilePaths = $pipeSeparatedremoteFilePaths.Split("|") - -# Clear any previously downloaded artifacts -Write-Host "Clearing all pre-existing files and folders from '$downloadDir'" -if(Test-Path -Path $downloadDir){ - Get-ChildItem $downloadDir -Recurse | Remove-Item -Recurse -Force -} else { - $_ = New-Item -ItemType directory -Path $downloadDir -} - -# Download artifacts -Write-Host "Downloading $numFiles files to '$downloadDir'" -foreach($remoteFilePath in $remoteFilePaths){ - $fileName = Split-Path -Leaf $remoteFilePath - $fileDirRel = Split-Path -Parent $remoteFilePath - $fileDirFull = Join-Path $downloadDir $fileDirRel - if(-not (Test-Path -Path $fileDirFull )){ - $_ = New-Item -ItemType directory -Path $fileDirFull - } - $filePath = Join-Path $fileDirFull $fileName - $remoteUrl = "https://$storageAccountName.$storageService.core.windows.net/$shareOrContainerName/$remoteFilePath"; - Write-Host " [ ] fetching $remoteUrl..." 
- $_ = Invoke-WebRequest -Uri "$remoteUrl$sasToken" -OutFile $filePath; - if ($?) { - Write-Host " [o] Succeeded" - } else { - Write-Host " [x] Failed!" - } -} \ No newline at end of file diff --git a/deployment/secure_research_environment/remote/create_rds/scripts/Install_Additional_Powershell_Modules.ps1 b/deployment/secure_research_environment/remote/create_rds/scripts/Install_Additional_Powershell_Modules.ps1 deleted file mode 100644 index 1db71e3400..0000000000 --- a/deployment/secure_research_environment/remote/create_rds/scripts/Install_Additional_Powershell_Modules.ps1 +++ /dev/null @@ -1,17 +0,0 @@ -# Check installed modules -Write-Host "List installed modules..." -Get-Module - -# Install module -Write-Host "Installing RDWebClientManagement..." -Set-PSRepository -Name PSGallery -InstallationPolicy Trusted -Install-Module -Name RDWebClientManagement -Force -AllowClobber -AcceptLicense -if ($?) { - Write-Host " [o] Succeeded" -} else { - Write-Host " [x] Failed!" -} - -# Check installed modules -Write-Host "List installed modules..." -Get-Module diff --git a/deployment/secure_research_environment/remote/create_rds/scripts/Install_Packages.ps1 b/deployment/secure_research_environment/remote/create_rds/scripts/Install_Packages.ps1 deleted file mode 100644 index 6854b296fa..0000000000 --- a/deployment/secure_research_environment/remote/create_rds/scripts/Install_Packages.ps1 +++ /dev/null @@ -1,22 +0,0 @@ -# Get list of packages -$packages = Get-ChildItem "C:\Installation\" -Write-Host "Preparing to install $($packages.Count) packages..." - -# Install each package -foreach ($package in $packages){ - Write-Host " [ ] installing $($package.FullName)..." - - if($package -like "*.msi") { - Start-Process $package.FullName -ArgumentList '/quiet' -Verbose -Wait - } else { - if($package -like "*WinSCP*exe") { - Start-Process $package.FullName -ArgumentList '/SILENT','/ALLUSERS' -Verbose -Wait - } - } - # Check installation status - if ($?) 
{ - Write-Host " [o] Succeeded" - } else { - Write-Host " [x] Failed!" - } -} \ No newline at end of file diff --git a/deployment/secure_research_environment/remote/create_rds/scripts/Set_OS_Locale_and_DNS.ps1 b/deployment/secure_research_environment/remote/create_rds/scripts/Set_OS_Locale_and_DNS.ps1 deleted file mode 100644 index bcb317ab25..0000000000 --- a/deployment/secure_research_environment/remote/create_rds/scripts/Set_OS_Locale_and_DNS.ps1 +++ /dev/null @@ -1,27 +0,0 @@ -# Don't make parameters mandatory as if there is any issue binding them, the script will prompt for them -# and remote execution will stall waiting for the non-present user to enter the missing parameter on the -# command line. This take up to 90 minutes to timeout, though you can try running resetState.cmd in -# C:\Packages\Plugins\Microsoft.CPlat.Core.RunCommandWindows\1.1.0 on the remote VM to cancel a stalled -# job, but this does not seem to have an immediate effect -# For details, see https://docs.microsoft.com/en-gb/azure/virtual-machines/windows/run-command -param( - [Parameter(Position=0, HelpMessage = "SRE fully qualified domain name")] - [string]$sreFqdn, - [Parameter(Position=1, HelpMessage = "SHM fully qualified domain name")] - [string]$shmFqdn -) - - -# LOCALE CODE IS PROGRAMATICALLY INSERTED HERE - - -# Set DNS defaults -# ---------------- -Write-Host "Setting DNS search order to: $sreFqdn, $shmFqdn" -$class = [wmiclass]'Win32_NetworkAdapterConfiguration' -$_ = $class.SetDNSSuffixSearchOrder(@("$sreFqdn", "$shmFqdn")) -if ($?) 
{ - Write-Host " [o] Completed" -} else { - Write-Host " [x] Failed" -} \ No newline at end of file diff --git a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 index bfc32b2c1e..b36df54886 100644 --- a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 +++ b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 @@ -53,7 +53,7 @@ try { # Create collections # ------------------ foreach($rdsConfiguration in @(("Applications", "", "", "F:\AppFileShares"), - ("Desktop (Windows)", "", "", "G:\RDPFileShares"), + ("Windows (Desktop)", "", "", "G:\RDPFileShares"), ("Review", "", "", "H:\ReviewFileShares"))) { $collectionName, $sessionHost, $userGroup, $sharePath = $rdsConfiguration @@ -103,10 +103,9 @@ try { # --------------------------- Write-Output "Updating server configuration..." 
try { - Get-Process ServerManager -ErrorAction SilentlyContinue | Stop-Process -Force foreach ($targetDirectory in @("C:\Users\\AppData\Roaming\Microsoft\Windows\ServerManager", - "C:\Users\.\AppData\Roaming\Microsoft\Windows\ServerManager")) { + "C:\Users\.\AppData\Roaming\Microsoft\Windows\ServerManager")) { $_ = New-Item -ItemType Directory -Path $targetDirectory -Force -ErrorAction Stop Copy-Item -Path "\ServerList.xml" -Destination "$targetDirectory\ServerList.xml" -Force -ErrorAction Stop } diff --git a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 index f330278553..44105a39bf 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 @@ -23,22 +23,22 @@ $_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName $_ = Deploy-ResourceGroup -Name $config.sre.network.vnet.rg -Location $config.sre.location -# # Create VNet from template -# # ------------------------- -# Add-LogMessage -Level Info "Creating virtual network '$($config.sre.network.vnet.name)' from template..." -# $params = @{ -# "Virtual Network Name" = $config.sre.network.vnet.Name -# "Virtual Network Address Space" = $config.sre.network.vnet.cidr -# "Subnet-Identity Address Prefix" = $config.sre.network.subnets.identity.cidr -# "Subnet-RDS Address Prefix" = $config.sre.network.subnets.rds.cidr -# "Subnet-Data Address Prefix" = $config.sre.network.subnets.data.cidr -# "Subnet-Identity Name" = $config.sre.network.subnets.identity.Name -# "Subnet-RDS Name" = $config.sre.network.subnets.rds.Name -# "Subnet-Data Name" = $config.sre.network.subnets.data.Name -# "VNET_DNS_DC1" = $config.shm.dc.ip -# "VNET_DNS_DC2" = $config.shm.dcb.ip -# } -# Deploy-ArmTemplate -TemplatePath (Join-Path $PSScriptRoot ".." 
"arm_templates" "sre-vnet-gateway-template.json") -Params $params -ResourceGroupName $config.sre.network.vnet.rg +# Create VNet from template +# ------------------------- +Add-LogMessage -Level Info "Creating virtual network '$($config.sre.network.vnet.name)' from template..." +$params = @{ + "Virtual Network Name" = $config.sre.network.vnet.Name + "Virtual Network Address Space" = $config.sre.network.vnet.cidr + "Subnet-Identity Address Prefix" = $config.sre.network.subnets.identity.cidr + "Subnet-RDS Address Prefix" = $config.sre.network.subnets.rds.cidr + "Subnet-Data Address Prefix" = $config.sre.network.subnets.data.cidr + "Subnet-Identity Name" = $config.sre.network.subnets.identity.Name + "Subnet-RDS Name" = $config.sre.network.subnets.rds.Name + "Subnet-Data Name" = $config.sre.network.subnets.data.Name + "VNET_DNS_DC1" = $config.shm.dc.ip + "VNET_DNS_DC2" = $config.shm.dcb.ip +} +Deploy-ArmTemplate -TemplatePath (Join-Path $PSScriptRoot ".." "arm_templates" "sre-vnet-gateway-template.json") -Params $params -ResourceGroupName $config.sre.network.vnet.rg # Fetch VNet information @@ -107,7 +107,6 @@ $vmNamePairs = @(("RDS Gateway", $config.sre.rds.gateway.vmName), Add-LogMessage -Level Info "Creating/retrieving secrets from key vault '$($config.sre.keyVault.name)'..." 
$dataSubnetIpPrefix = $config.sre.network.subnets.data.prefix $airlockSubnetIpPrefix = $config.sre.network.subnets.airlock.prefix -$npsSecret = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.Name -SecretName $config.sre.keyVault.secretNames.npsSecret -DefaultLength 12 $rdsGatewayVmFqdn = $config.sre.rds.gateway.fqdn $rdsGatewayVmName = $config.sre.rds.gateway.vmName $rdsSh1VmFqdn = $config.sre.rds.sessionHost1.fqdn @@ -124,7 +123,6 @@ $shmNetbiosName = $config.shm.domain.netbiosName $sreAdminPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.Name -SecretName $config.sre.keyVault.secretNames.rdsAdminPassword $sreAdminUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.Name -SecretName $config.sre.keyVault.secretNames.adminUsername -DefaultValue "sre$($config.sre.id)admin".ToLower() $sreFqdn = $config.sre.domain.fqdn -$sreNetbiosName = $config.sre.domain.netbiosName # Ensure that boot diagnostics resource group and storage account exist @@ -267,7 +265,7 @@ $template.Replace('',$rdsGatewayVmFqdn). Replace('',$rdsSh3VmFqdn). 
Replace('',$sreFqdn) | Out-File $serverListLocalFilePath -# Copy existing files +# Copy installers from SHM storage Add-LogMessage -Level Info "[ ] Copying RDS installers to storage account '$($sreStorageAccount.StorageAccountName)'" $blobs = Get-AzStorageBlob -Context $shmStorageAccount.Context -Container $containerNameSessionHosts $blobs | Start-AzStorageBlobCopy -Context $shmStorageAccount.Context -DestContext $sreStorageAccount.Context -DestContainer $containerNameSessionHosts -Force @@ -306,12 +304,11 @@ $baseDnsRecordname = "@" $gatewayDnsRecordname = "$($config.sre.rds.gateway.hostname)".ToLower() $dnsResourceGroup = $config.shm.dns.rg $dnsTtlSeconds = 30 -$sreDomain = $config.sre.domain.fqdn # Set the A record -Add-LogMessage -Level Info "[ ] Setting 'A' record for gateway host to '$rdsGatewayPublicIp' in SRE $($config.sre.id) DNS zone ($sreDomain)" -Remove-AzDnsRecordSet -Name $baseDnsRecordname -RecordType A -ZoneName $sreDomain -ResourceGroupName $dnsResourceGroup -$result = New-AzDnsRecordSet -Name $baseDnsRecordname -RecordType A -ZoneName $sreDomain -ResourceGroupName $dnsResourceGroup ` +Add-LogMessage -Level Info "[ ] Setting 'A' record for gateway host to '$rdsGatewayPublicIp' in SRE $($config.sre.id) DNS zone ($sreFqdn)" +Remove-AzDnsRecordSet -Name $baseDnsRecordname -RecordType A -ZoneName $sreFqdn -ResourceGroupName $dnsResourceGroup +$result = New-AzDnsRecordSet -Name $baseDnsRecordname -RecordType A -ZoneName $sreFqdn -ResourceGroupName $dnsResourceGroup ` -Ttl $dnsTtlSeconds -DnsRecords (New-AzDnsRecordConfig -IPv4Address $rdsGatewayPublicIp) if ($?) { Add-LogMessage -Level Success "Successfully set 'A' record for gateway host" @@ -320,10 +317,10 @@ if ($?) 
{ } # Set the CNAME record -Add-LogMessage -Level Info "[ ] Setting CNAME record for gateway host to point to the 'A' record in SRE $($config.sre.id) DNS zone ($sreDomain)" -Remove-AzDnsRecordSet -Name $gatewayDnsRecordname -RecordType CNAME -ZoneName $sreDomain -ResourceGroupName $dnsResourceGroup -$result = New-AzDnsRecordSet -Name $gatewayDnsRecordname -RecordType CNAME -ZoneName $sreDomain -ResourceGroupName $dnsResourceGroup ` - -Ttl $dnsTtlSeconds -DnsRecords (New-AzDnsRecordConfig -Cname $sreDomain) +Add-LogMessage -Level Info "[ ] Setting CNAME record for gateway host to point to the 'A' record in SRE $($config.sre.id) DNS zone ($sreFqdn)" +Remove-AzDnsRecordSet -Name $gatewayDnsRecordname -RecordType CNAME -ZoneName $sreFqdn -ResourceGroupName $dnsResourceGroup +$result = New-AzDnsRecordSet -Name $gatewayDnsRecordname -RecordType CNAME -ZoneName $sreFqdn -ResourceGroupName $dnsResourceGroup ` + -Ttl $dnsTtlSeconds -DnsRecords (New-AzDnsRecordConfig -Cname $sreFqdn) if ($?) { Add-LogMessage -Level Success "Successfully set 'CNAME' record for gateway host" } else { @@ -349,117 +346,59 @@ Write-Output $result.Value $_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName -# Set locale, install updates and reboot -# -------------------------------------- -foreach ($nameVMNameParamsPair in $vmNamePairs) { - $name, $vmName = $nameVMNameParamsPair - Add-LogMessage -Level Info "Updating ${name}: '$vmName'..." 
- $params = @{} - # The RDS Gateway needs the RDWebClientManagement Powershell module - if ($name -eq "RDS Gateway") { $params["AdditionalPowershellModules"] = @("RDWebClientManagement") } - Invoke-WindowsConfigureAndUpdate -VMName $vmName -ResourceGroupName $config.sre.rds.rg @params -} - - -# Import files to RDS VMs -# ----------------------- +# Import files from blob storage to RDS VMs and install them if appropriate +# ------------------------------------------------------------------------- Add-LogMessage -Level Info "Importing files from storage to RDS VMs..." -$_ = Set-AzContext -SubscriptionId $config.shm.subscriptionName -# Get list of packages for each session host -Add-LogMessage -Level Info "[ ] Getting list of packages for each VM" -$filePathsSh1 = New-Object System.Collections.ArrayList ($null) -$filePathsSh2 = New-Object System.Collections.ArrayList ($null) -$filePathsSh3 = New-Object System.Collections.ArrayList ($null) +# Set correct list of package from blob storage for each session host +$blobfiles = @{} +$vmNamePairs | ForEach-Object { $blobfiles[$_[1]] = @() } foreach ($blob in Get-AzStorageBlob -Container $containerNameSessionHosts -Context $sreStorageAccount.Context) { if (($blob.Name -like "*GoogleChrome_x64.msi") -or ($blob.Name -like "*PuTTY_x64.msi") -or ($blob.Name -like "*WinSCP_x32.exe")) { - $_ = $filePathsSh1.Add($blob.Name) - $_ = $filePathsSh2.Add($blob.Name) - $_ = $filePathsSh3.Add($blob.Name) + $blobfiles[$config.sre.rds.sessionHost1.vmName] += @{$containerNameSessionHosts = $blob.Name} + $blobfiles[$config.sre.rds.sessionHost2.vmName] += @{$containerNameSessionHosts = $blob.Name} + $blobfiles[$config.sre.rds.sessionHost3.vmName] += @{$containerNameSessionHosts = $blob.Name} } elseif ($blob.Name -like "*LibreOffice_x64.msi") { - $_ = $filePathsSh2.Add($blob.Name) + $blobfiles[$config.sre.rds.sessionHost2.vmName] += @{$containerNameSessionHosts = $blob.Name} } } # ... 
and for the gateway -$filePathsGateway = New-Object System.Collections.ArrayList ($null) foreach ($blob in Get-AzStorageBlob -Container $containerNameGateway -Context $sreStorageAccount.Context) { - $_ = $filePathsGateway.Add($blob.Name) -} -Add-LogMessage -Level Success "Found $($filePathsSh1.Count + $filePathsSh2.Count) packages in total" - -# Get SAS token to download files from storage account -$_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName -$sasToken = New-ReadOnlyAccountSasToken -SubscriptionName $config.sre.subscriptionName -ResourceGroup $config.sre.storage.artifacts.rg -AccountName $sreStorageAccount.StorageAccountName -$scriptPath = Join-Path $PSScriptRoot ".." "remote" "create_rds" "scripts" "Import_Artifacts.ps1" - -# Copy software and/or scripts to RDS Gateway -Add-LogMessage -Level Info "[ ] Copying $($filePathsGateway.Count) files to RDS Gateway" -$params = @{ - storageAccountName = "`"$($sreStorageAccount.StorageAccountName)`"" - storageService = "blob" - shareOrContainerName = "`"$containerNameGateway`"" - sasToken = "`"$sasToken`"" - pipeSeparatedremoteFilePaths = "`"$($filePathsGateway -join "|")`"" - downloadDir = "$remoteUploadDir" -} -$result = Invoke-RemoteScript -Shell "PowerShell" -ScriptPath $scriptPath -VMName $config.sre.rds.gateway.vmName -ResourceGroupName $config.sre.rds.rg -Parameter $params -Write-Output $result.Value - -# Copy software and/or scripts to RDS SH1 (App server) -Add-LogMessage -Level Info "[ ] Copying $($filePathsSh1.Count) files to RDS Session Host (App server)" -$params = @{ - storageAccountName = "`"$($sreStorageAccount.StorageAccountName)`"" - storageService = "blob" - shareOrContainerName = "`"$containerNameSessionHosts`"" - sasToken = "`"$sasToken`"" - pipeSeparatedremoteFilePaths = "`"$($filePathsSh1 -join "|")`"" - downloadDir = "$remoteUploadDir" -} -$result = Invoke-RemoteScript -Shell "PowerShell" -ScriptPath $scriptPath -VMName $config.sre.rds.sessionHost1.vmName -ResourceGroupName 
$config.sre.rds.rg -Parameter $params -Write-Output $result.Value - -# Copy software and/or scripts to RDS SH2 (Remote desktop server) -Add-LogMessage -Level Info "[ ] Copying $($filePathsSh2.Count) files to RDS Session Host (Remote desktop server)" -$params = @{ - storageAccountName = "`"$($sreStorageAccount.StorageAccountName)`"" - storageService = "blob" - shareOrContainerName = "`"$containerNameSessionHosts`"" - sasToken = "`"$sasToken`"" - pipeSeparatedremoteFilePaths = "`"$($filePathsSh2 -join "|")`"" - downloadDir = "$remoteUploadDir" + $blobfiles[$config.sre.rds.gateway.vmName] += @{$containerNameGateway = $blob.Name} } -$result = Invoke-RemoteScript -Shell "PowerShell" -ScriptPath $scriptPath -VMName $config.sre.rds.sessionHost2.vmName -ResourceGroupName $config.sre.rds.rg -Parameter $params -Write-Output $result.Value -# Copy software and/or scripts to RDS SH3 (Review server) -Add-LogMessage -Level Info "[ ] Copying $($filePathsSh3.Count) files to RDS Session Host (Review server)" -$params = @{ - storageAccountName = "`"$($sreStorageAccount.StorageAccountName)`"" - storageService = "blob" - shareOrContainerName = "`"$containerNameSessionHosts`"" - sasToken = "`"$sasToken`"" - pipeSeparatedremoteFilePaths = "`"$($filePathsSh3 -join "|")`"" - downloadDir = "$remoteUploadDir" +# Copy software and/or scripts to RDS VMs +$scriptPath = Join-Path $PSScriptRoot ".." 
"remote" "create_rds" "scripts" "Import_And_Install_Blobs.ps1" +foreach ($nameVMNameParamsPair in $vmNamePairs) { + $name, $vmName = $nameVMNameParamsPair + $containerName = $blobfiles[$vmName] | ForEach-Object { $_.Keys } | Select-Object -First 1 + $fileNames = $blobfiles[$vmName] | ForEach-Object { $_.Values } + $sasToken = New-ReadOnlyAccountSasToken -SubscriptionName $config.sre.subscriptionName -ResourceGroup $config.sre.storage.artifacts.rg -AccountName $sreStorageAccount.StorageAccountName + Add-LogMessage -Level Info "[ ] Copying $($fileNames.Count) files to $name" + $params = @{ + storageAccountName = "`"$($sreStorageAccount.StorageAccountName)`"" + storageService = "blob" + shareOrContainerName = "`"$containerName`"" + sasToken = "`"$sasToken`"" + pipeSeparatedRemoteFilePaths = "`"$($fileNames -join "|")`"" + downloadDir = "$remoteUploadDir" + } + $result = Invoke-RemoteScript -Shell "PowerShell" -ScriptPath $scriptPath -VMName $vmName -ResourceGroupName $config.sre.rds.rg -Parameter $params + Write-Output $result.Value } -$result = Invoke-RemoteScript -Shell "PowerShell" -ScriptPath $scriptPath -VMName $config.sre.rds.sessionHost3.vmName -ResourceGroupName $config.sre.rds.rg -Parameter $params -Write-Output $result.Value -# Install packages on RDS VMs -# --------------------------- -Add-LogMessage -Level Info "Installing packages on RDS VMs..." -$_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName +# Set locale, install updates and reboot +# -------------------------------------- foreach ($nameVMNameParamsPair in $vmNamePairs) { $name, $vmName = $nameVMNameParamsPair - if ($name -ne "RDS Gateway") { - Add-LogMessage -Level Info "[ ] Installing packages on ${name}: '$vmName'" - $scriptPath = Join-Path $PSScriptRoot ".." 
"remote" "create_rds" "scripts" "Install_Packages.ps1" - $result = Invoke-RemoteScript -Shell "PowerShell" -ScriptPath $scriptPath -VMName $vmName -ResourceGroupName $config.sre.rds.rg - Write-Output $result.Value - } + Add-LogMessage -Level Info "Updating ${name}: '$vmName'..." + $params = @{} + # The RDS Gateway needs the RDWebClientManagement Powershell module + if ($name -eq "RDS Gateway") { $params["AdditionalPowershellModules"] = @("RDWebClientManagement") } + Invoke-WindowsConfigureAndUpdate -VMName $vmName -ResourceGroupName $config.sre.rds.rg @params } - # Add VMs to correct NSG # ---------------------- Add-VmToNSG -VMName $config.sre.rds.gateway.vmName -NSGName $config.sre.rds.gateway.nsg @@ -472,13 +411,7 @@ Add-VmToNSG -VMName $config.sre.rds.sessionHost3.vmName -NSGName $config.sre.rds # ---------------------- foreach ($nameVMNameParamsPair in $vmNamePairs) { $name, $vmName = $nameVMNameParamsPair - Add-LogMessage -Level Info "Rebooting the ${name} VM: '$vmName'" Enable-AzVM -Name $vmName -ResourceGroupName $config.sre.rds.rg - if ($?) { - Add-LogMessage -Level Success "Rebooting the ${name} succeeded" - } else { - Add-LogMessage -Level Fatal "Rebooting the ${name} failed!" 
- } } diff --git a/environment_configs/full/sre_testasandbox_full_config.json b/environment_configs/full/sre_testasandbox_full_config.json index 33afaa66cb..d1050c5b92 100644 --- a/environment_configs/full/sre_testasandbox_full_config.json +++ b/environment_configs/full/sre_testasandbox_full_config.json @@ -268,7 +268,7 @@ "researchers": { "test": { "name": "SANDBOX Test Researcher", - "samAccountName": "testresrchsandbox" + "samAccountName": "researchersandbox" } } }, From 23a2733e51a020b192ae445c2b10b081a9572791 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Mon, 4 May 2020 14:33:42 +0100 Subject: [PATCH 019/155] wait for gitlab server health check before interacting with api --- .../cloud-init-gitlab-external.template.yaml | 60 ++++++++++++------- 1 file changed, 39 insertions(+), 21 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index cb7d4a3e12..1f0df5f26c 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -147,7 +147,7 @@ write_files: os.system("git push gitlab-internal " + repo["gitlab_branch"]) os.chdir("..") - # Script for monitoring and accepting approved merge requests + # Script for monitoring and accepting approval merge requests - path: "/home//check_merge_requests.py" permissions: "0755" content: | @@ -233,7 +233,7 @@ write_files: def get_merge_requests_for_approval(config): - group = get_group_id("approved", config) + group = get_group_id("approval", config) endpoint = config["api_url"] + f"/groups/{group}/merge_requests" response = get_request( endpoint, headers=config["headers"], params={"state": "opened"} @@ -407,6 +407,40 @@ runcmd: chown : "/home//.ssh/id_ed25519" chown : "/home//.ssh/id_ed25519.pub" # 
-------------------------------- + # SETUP ACCESS TO GITLAB INTERNAL + # -------------------------------- + - echo "Configuring access to gitlab internal" + # Change ownership of secrets to + - | + chown : "/home//.secrets/gitlab-internal-api-token"; + chown : "/home//.secrets/gitlab-internal-ip-address"; + chown : "/home//.secrets/gitlab-internal-username"; + chown : "/home//.secrets/gitlab-internal-user-email"; + # Create SSH key for gitlab internal access, add gitlab internal to known hosts + - | + key=$(cat /home//.ssh/id_ed25519.pub); + curl --header 'Authorization: Bearer ' --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"InternalAPIUser\"}" /api/v4/user/keys; + ssh-keyscan -H >> /home//.ssh/known_hosts; + chown : "/home//.ssh/known_hosts" + # -------------------------------- + # WAIT FOR GITLAB EXTERNAL HEALTH CHECK + # -------------------------------- + - | + attempt_counter=0 + max_attempts=60 + echo "Waiting for GitLab OK health check" + until [[ $(curl -s localhost/-/health) == "GitLab OK" ]] + do + if [ ${attempt_counter} -eq ${max_attempts} ];then + echo + echo "FAILED: Max GitLab attempts reached. Exiting." + exit 1 + fi + printf "." 
+ attempt_counter=$(($attempt_counter+1)) + sleep 10 + done + # -------------------------------- # SETUP ACCESS TO GITLAB EXTERNAL # -------------------------------- - echo "Configuring access to gitlab external" @@ -422,26 +456,10 @@ runcmd: curl --header 'Authorization: Bearer ' --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"ExternalAPIUser\"}" /api/v4/user/keys; ssh-keyscan -H >> /home//.ssh/known_hosts; chown : "/home//.ssh/known_hosts" - # Create groups for storing unapproved and approved repos + # Create groups for storing unapproved and approval repos - | - curl --header "Authorization: Bearer " --data "name=approved&path=approved&visibility=public" /api/v4/groups; - curl --header "Authorization: Bearer " --data "name=unapproved&path=unapproved&visibility=public" /api/v4/groups - # -------------------------------- - # SETUP ACCESS TO GITLAB INTERNAL - # -------------------------------- - - echo "Configuring access to gitlab internal" - # Change ownership of secrets to - - | - chown : "/home//.secrets/gitlab-internal-api-token"; - chown : "/home//.secrets/gitlab-internal-ip-address"; - chown : "/home//.secrets/gitlab-internal-username"; - chown : "/home//.secrets/gitlab-internal-user-email"; - # Create SSH key for gitlab internal access, add gitlab internal to known hosts - - | - key=$(cat /home//.ssh/id_ed25519.pub); - curl --header 'Authorization: Bearer ' --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"InternalAPIUser\"}" /api/v4/user/keys; - ssh-keyscan -H >> /home//.ssh/known_hosts; - chown : "/home//.ssh/known_hosts" + curl --header "Authorization: Bearer " --data "name=approval&path=approval&visibility=public" /api/v4/groups; + curl --header "Authorization: Bearer " --data "name=unapproved&path=unapproved&visibility=public" /api/v4/groups # -------------------------------- # FINAL SETUP # -------------------------------- From 59d307397340d09fccbdba6ff2fa3cd59d4e2aad Mon Sep 17 
00:00:00 2001 From: Jack Roberts Date: Mon, 4 May 2020 16:47:13 +0100 Subject: [PATCH 020/155] replace bash syntax --- .../cloud_init/cloud-init-gitlab-external.template.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index 1f0df5f26c..084a17998b 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -429,7 +429,7 @@ runcmd: attempt_counter=0 max_attempts=60 echo "Waiting for GitLab OK health check" - until [[ $(curl -s localhost/-/health) == "GitLab OK" ]] + until [ $(curl -s localhost/-/health) = "GitLab OK" ] do if [ ${attempt_counter} -eq ${max_attempts} ];then echo From ab9de4cb7811a0a98704f3e4b899d56973b92198 Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Tue, 5 May 2020 07:03:56 +0100 Subject: [PATCH 021/155] Script to create zipfile from specified commit on a git repo, then upload to GitlabExternal VM via blob storage --- .../SRE_Upload_Git_Repo_to_GitlabExternal.ps1 | 113 ++++++++++++++++++ 1 file changed, 113 insertions(+) create mode 100644 deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 new file mode 100644 index 0000000000..2a5d19713c --- /dev/null +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 @@ -0,0 +1,113 @@ +param( + [Parameter(Mandatory = $true, HelpMessage = "Enter SRE ID (usually a number e.g enter '9' for DSG9)")] + [string]$sreId, + [Parameter( Mandatory = $true, HelpMessage = "Enter repo URL")] + [string]$repoURL, + [Parameter( Mandatory = $true, HelpMessage = "Enter repo name")] + 
[string]$repoName, + [Parameter( Mandatory = $true, HelpMessage = "Enter commit hash of the desired commit on external repository")] + [string]$commitHash, + [Parameter( Mandatory = $true, HelpMessage = "Enter desired branch name for the project inside Safe Haven")] + [string]$branchName +) + +Import-Module Az +Import-Module $PSScriptRoot/../common/Configuration.psm1 -Force +Import-Module $PSScriptRoot/../common/Logging.psm1 -Force +Import-Module $PSScriptRoot/../common/Deployments.psm1 -Force +Import-Module $PSScriptRoot/../common/GenerateSasToken.psm1 -Force + +# Get config and original context before changing subscription +# ------------------------------------------------------------ +#$config = Get-ShmFullConfig $shmId +$config = Get-SreConfig $sreId +$originalContext = Get-AzContext +$_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName + +# Create local zip file +# --------------------- +Add-LogMessage -Level Info "Creating zipfilepath." +$zipFileName = "${repoName}_${commitHash}_${branchName}.zip" +$zipFilePath = Join-Path $PSScriptRoot $zipFileName +$tempDir = New-Item -ItemType Directory -Path (Join-Path ([System.IO.Path]::GetTempPath()) ([System.IO.Path]::GetRandomFileName()) "") + +Add-LogMessage -Level Info "About to git clone " +$tempDir = New-Item -ItemType Directory -Path (Join-Path ([System.IO.Path]::GetTempPath()) ([System.IO.Path]::GetRandomFileName()) "$repoName") + +Invoke-Expression -Command "git clone $repoURL $tempDir" +$workingDir = Get-Location +Set-Location $tempDir +Invoke-Expression -Command "git checkout $commitHash" +# Remove the .git directory +Remove-Item -Path ".git" -Recurse -Force +# Zip this directory +if (Test-Path $zipFilePath) { Remove-Item $zipFilePath } +Compress-Archive -CompressionLevel NoCompression -Path $tempDir -DestinationPath $zipFilePath +if ($?) { + Add-LogMessage -Level Success "Zip file creation succeeded! $zipFilePath" +} else { + Add-LogMessage -Level Fatal "Zip file creation failed!" 
+} +Set-Location $workingDir + + +# Upload the zip file to the VM, via blob storage +# ----------------------------------------------- + +$gitlabExternalVmName = $config.sre.webapps.gitlab.external.vmName +# Go via blob storage - first create storage account if not already there +$resourceGroupName = $config.sre.webapps.rg +$sreStorageAccountName = $config.sre.storage.artifacts.accountName +$sreStorageAccount = Deploy-StorageAccount -Name $sreStorageAccountName -ResourceGroupName $resourceGroupName -Location $config.sre.location + +# Create container if not already there +$containerName = $config.sre.storage.artifacts.gitlabAirlockContainerName +Add-LogMessage -Level Info "Creating blob storage container $containerName in storage account $sreStorageAccountName ..." +$_ = Deploy-StorageContainer -Name $containerName -StorageAccount $sreStorageAccount +# delete existing blobs on the container +$blobs = @(Get-AzStorageBlob -Container $containerName -Context $sreStorageAccount.Context) +$numBlobs = $blobs.Length +if ($numBlobs -gt 0) { + Add-LogMessage -Level Info "[ ] deleting $numBlobs blobs aready in container '$containerName'..." + $blobs | ForEach-Object { Remove-AzStorageBlob -Blob $_.Name -Container $containerName -Context $sreStorageAccount.Context -Force } + while ($numBlobs -gt 0) { + Start-Sleep -Seconds 5 + $numBlobs = (Get-AzStorageBlob -Container $containerName -Context $sreStorageAccount.Context).Length + } + if ($?) { + Add-LogMessage -Level Success "Blob deletion succeeded" + } else { + Add-LogMessage -Level Fatal "Blob deletion failed!" + } +} + +# copy zipfile to blob storage +# ---------------------------- +Add-LogMessage -Level Info "Upload zipfile to storage..." 
+Set-AzStorageBlobContent -Container $containerName -Context $sreStorageAccount.Context -File $zipFilePath -Blob $zipFileName -Force + +# Download zipfile onto the remote machine +# ---------------------------------------- +# Get a SAS token and construct URL +$sasToken = New-ReadOnlyAccountSasToken -ResourceGroup $resourceGroupName -AccountName $sreStorageAccount.StorageAccountName -SubscriptionName $config.sre.subscriptionName +$remoteUrl = "https://$($sreStorageAccount.StorageAccountName).blob.core.windows.net/${containerName}/${zipFileName}${sasToken}" +Add-LogMessage -Level Info "Got SAS token and URL $remoteUrl" + +# Create remote script (make a directory /zfiles/ and run CURL to download blob to there) +$script = @" +#!/bin/bash +mkdir -p /zfiles +curl -X GET -o /zfiles/${zipFileName} ${remoteUrl} +"@ + +Add-LogMessage -Level Info "[ ] Running remote script to download zipfile onto $gitlabExternalVmName" +$result = Invoke-RemoteScript -Shell "UnixShell" -Script $script -VMName $gitlabExternalVmName -ResourceGroupName $resourceGroupName + +# clean up - remove the zipfile from local machine. 
+Add-LogMessage -Level Info "[ ] Removing original zipfile $zipFilePath" +Remove-Item -Path $zipFilePath + + +# Switch back to original subscription +# ------------------------------------ +$_ = Set-AzContext -Context $originalContext From 088196364d38dfdaaf2eb52a0b28d370bdd70e52 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Mon, 4 May 2020 20:17:22 +0100 Subject: [PATCH 022/155] couple more shell formatting fixes --- .../cloud_init/cloud-init-gitlab-external.template.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index 084a17998b..37403977d7 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -429,7 +429,7 @@ runcmd: attempt_counter=0 max_attempts=60 echo "Waiting for GitLab OK health check" - until [ $(curl -s localhost/-/health) = "GitLab OK" ] + until [ "$(curl -s localhost/-/health)" = "GitLab OK" ] do if [ ${attempt_counter} -eq ${max_attempts} ];then echo @@ -437,7 +437,7 @@ runcmd: exit 1 fi printf "." 
- attempt_counter=$(($attempt_counter+1)) + attempt_counter=$((attempt_counter+1)) sleep 10 done # -------------------------------- From b5b2337f1e1bdde3b365f5f0eb7f938dfd84f030 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 4 May 2020 18:38:52 +0100 Subject: [PATCH 023/155] Add function to create or push to a repo on GITLAB-INTERNAL --- .../cloud-init-gitlab-external.template.yaml | 89 +++++++++++++++++-- 1 file changed, 80 insertions(+), 9 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index 37403977d7..1311e8ed8f 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -152,11 +152,12 @@ write_files: permissions: "0755" content: | import requests + import subprocess + from urllib.parse import quote as url_quote from pathlib import Path import logging from logging.handlers import RotatingFileHandler - logger = logging.getLogger("merge_requests_logger") logger.setLevel(logging.INFO) formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s") @@ -169,10 +170,83 @@ write_files: logger.addHandler(f_handler) logger.addHandler(c_handler) + global HOME + global GL_INTERNAL_IP + global GL_INTERNAL_TOKEN + global GL_INTERNAL_AUTH_HEADER + + HOME = str(Path.home()) + + with open(f"{HOME}/.secrets/gitlab-internal-ip-address", "r") as f: + GL_INTERNAL_IP = f.readlines()[0].strip() + + GL_INTERNAL_URL = "http://" + GL_INTERNAL_IP + "/api/v4" + + with open(f"{HOME}/.secrets/gitlab-internal-api-token", "r") as f: + GL_INTERNAL_TOKEN = f.readlines()[0].strip() - def push_to_internal(project_url, project_name, target_branch, commit_sha): - # TODO Replace with Oliver's function - logger.info(f"Pushing {project_name} to gitlab internal") + 
GL_INTERNAL_AUTH_HEADER = {"Authorization": "Bearer " + GL_INTERNAL_TOKEN} + + + def internal_project_exists(repo_name): + """Given a string (the name of a repo - not a URL), returns a pair + (exists, url): + - exists: boolean - does repo_name exist on GITLAB-INTERNAL? + - url: str - the ssh url to the repo (when 'exists' is true) + """ + + # build url-encoded repo_name + repo_path_encoded = url_quote("ingress/" + repo_name, safe='') + + # Does repo_name exist on GITLAB-INTERNAL? + response = requests.get(GL_INTERNAL_URL + '/projects/' + repo_path_encoded, + headers=GL_INTERNAL_AUTH_HEADER) + + if response.status_code == 404: + return (False, "") + elif response.status_code == 200: + return (True, response.json()["ssh_url_to_repo"]) + else: + # Not using `response.raise_for_status()`, since we also want + # to raise an exception on unexpected "successful" responses + # (not 200) + raise requests.HTTPError("Unexpected response: " + response.reason + + ", content: " + response.text) + + + def internal_update_repo(gh_url, repo_name): + """Takes a GitHub URL, `gh_url`, which should be the URL to the + "APPROVED" repo, clones it and pushes all branches to the repo + `repo_name` owned by 'ingress' on GITLAB-INTERNAL, creating it + there first if it doesn't exist. 
+ """ + # clone the repo from gh_url (on GITLAB-EXTERNAL), removing any of + # the same name first (simpler than checking if it exists, has the + # same remote and pulling) + subprocess.run(["rm", "-rf", repo_name], check=True) + subprocess.run(["git", "clone", gh_url, repo_name], check=True) + + project_exists, gl_internal_repo_url = internal_project_exists(repo_name) + + # create the project if it doesn't exist + if not project_exists: + print("Creating: " + repo_name) + response = requests.post(GL_INTERNAL_URL + '/projects', + headers=GL_INTERNAL_AUTH_HEADER, + data={"name": repo_name, + "visibility": "public"}) + response.raise_for_status() + assert(response.json()["path_with_namespace"] == "ingress/" + repo_name) + + gl_internal_repo_url = response.json()["ssh_url_to_repo"] + + # Set the remote + subprocess.run(["git", "remote", "add", "gitlab-internal", + gl_internal_repo_url], cwd=repo_name, check=True) + + # Force push current contents of all branches + subprocess.run(["git", "push", "--force", "--all", + "gitlab-internal"], cwd=repo_name, check=True) def get_request(endpoint, headers, params=None): @@ -335,11 +409,9 @@ write_files: except Exception as e: logger.error(f"Failed to log accepted merge request: {e}") try: - push_to_internal( + internal_update_repo( target_project["ssh_url_to_repo"], - target_project["name"], - mr["target_branch"], - result["merge_commit_sha"], + target_project["name"] ) except Exception as e: logger.error(f"Failed to push to internal: {e}") @@ -353,7 +425,6 @@ write_files: if __name__ == "__main__": check_merge_requests() - runcmd: # -------------------------------- From 53485ba11d67c0412c348c815b5ffaa595794b73 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 4 May 2020 18:40:09 +0100 Subject: [PATCH 024/155] Remove old update from whitelist script --- .../cloud-init-gitlab-external.template.yaml | 61 ------------------- 1 file changed, 61 deletions(-) diff --git 
a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index 1311e8ed8f..b8b555bb76 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -86,67 +86,6 @@ write_files: permissions: "0600" content: | @ - # Script for ingressing repos to gitlab internal - - path: "/home//update_from_whitelist.py" - permissions: "0755" - content: | - import os - import json - import requests - import subprocess - from pathlib import Path - - home = str(Path.home()) - - with open("../whitelist", "r") as f: - whitelist = [line.strip().split(" ") for line in f.readlines()] - - whitelist = [{"url": repo[0], - "commit_sha": repo[1], - "gitlab_name": repo[2], - "gitlab_branch": repo[3]} for repo in whitelist] - - with open(f"{home}/.secrets/gitlab-internal-ip-address","r") as f: - gitlab_internal_ip = f.readlines()[0].strip() - - with open(f"{home}/.secrets/gitlab-internal-api-token","r") as f: - gitlab_token = f.readlines()[0].strip() - - gitlab_internal_url = "http://" + gitlab_internal_ip + "/api/v4/projects" - - gitlab_internal_projects = requests.get(gitlab_internal_url, - headers = {"Authorization": "Bearer " + gitlab_token}, - params = {"owned": True, "simple": True}) - - gitlab_internal_repo_names = [repo["name"].lower() for repo in gitlab_internal_projects.json()] - - for repo in whitelist: - repo_name = repo["gitlab_name"] - repo_path = os.path.join(repo_name) - if not os.path.exists(repo_path): - os.system("git clone " + repo["url"] + " " + repo["gitlab_name"]) - - os.chdir(repo["gitlab_name"]) - os.system("git fetch") - os.system("git checkout " + repo["commit_sha"]) - os.system("git branch -f " + repo["gitlab_branch"]) - os.system("git checkout " + repo["gitlab_branch"]) - - if repo_name.lower() not in 
gitlab_internal_repo_names: - response = requests.post(gitlab_internal_url, - headers = {"Authorization": "Bearer " + gitlab_token}, - data = {"name": repo_name, "visibility": "public"}) - - gitlab_internal_remote_url = response.json()["ssh_url_to_repo"] - assert(response.json()["name"] == repo_name) - - print("Adding remote gitlab-internal as " + gitlab_internal_remote_url) - os.system("git remote add gitlab-internal " + gitlab_internal_remote_url) - - - os.system("git push gitlab-internal " + repo["gitlab_branch"]) - - os.chdir("..") # Script for monitoring and accepting approval merge requests - path: "/home//check_merge_requests.py" permissions: "0755" From 22f8c4b94ff37c55219abdef304610646c43c53f Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Mon, 11 May 2020 10:23:38 +0100 Subject: [PATCH 025/155] add a line break to tidy up logging --- .../cloud_init/cloud-init-gitlab-external.template.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index b8b555bb76..156d764fae 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -450,6 +450,7 @@ runcmd: attempt_counter=$((attempt_counter+1)) sleep 10 done + echo # -------------------------------- # SETUP ACCESS TO GITLAB EXTERNAL # -------------------------------- From b5110291fc65b4b6e69f76fbf80e97ee81638da1 Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Tue, 5 May 2020 14:20:33 +0100 Subject: [PATCH 026/155] add quotes around url in curl command --- .../administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 
b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 index 2a5d19713c..d69b8a1b91 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 @@ -97,7 +97,7 @@ Add-LogMessage -Level Info "Got SAS token and URL $remoteUrl" $script = @" #!/bin/bash mkdir -p /zfiles -curl -X GET -o /zfiles/${zipFileName} ${remoteUrl} +curl -X GET -o /zfiles/${zipFileName} "${remoteUrl}" >& /zfiles/curl.log "@ Add-LogMessage -Level Info "[ ] Running remote script to download zipfile onto $gitlabExternalVmName" From bddd44d06a58e96174e59482d40814f7bfc2f466 Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Wed, 6 May 2020 17:47:11 +0100 Subject: [PATCH 027/155] add python script for creating projects and merge request in gitlab-external --- .../cloud-init-gitlab-external.template.yaml | 651 +++++++++++++++++- 1 file changed, 649 insertions(+), 2 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index 156d764fae..bf026b3366 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -86,6 +86,653 @@ write_files: permissions: "0600" content: | @ + # Script for creating projects and merge requests on gitlab-external + - path: "/home//zipfile_to_gitlab_project.py" + permissions: "0755" + content: | + import os + import shutil + import re + import requests + import subprocess + from zipfile import ZipFile, BadZipFile + from urllib.parse import quote as url_quote + from pathlib import Path + import logging + from logging.handlers import RotatingFileHandler + + logger = logging.getLogger("project_upload_logger") + logger.setLevel(logging.INFO) + formatter = logging.Formatter("%(asctime)s 
[%(levelname)s] %(message)s") + f_handler = RotatingFileHandler( + "upload_zipfiles_to_projects.log", maxBytes=5 * 1024 * 1024, backupCount=10 + ) + f_handler.setFormatter(formatter) + c_handler = logging.StreamHandler() + c_handler.setFormatter(formatter) + logger.addHandler(f_handler) + logger.addHandler(c_handler) + + + def unzip_zipfiles(zipfile_dir, tmp_repo_dir): + """ + Parameters + ========== + zipfile_dir: str, path to directory containing zipfiles + tmp_repo_dir: str, path to directory where zipfiles will be unzipped + + Returns + ======= + output_list: list of tuples + [(repo_name, commit_hash, desired_branch, unzipped-path),...] + + Note that the convention for the zipfile filenames is + __.zip + """ + output_list = [] + repo_commit_regex = re.compile("([-\w]+)_([a-f\d]+)_([\S]+).zip") + # tear down and recreate the directory where we will put the unpacked zip + shutil.rmtree(tmp_repo_dir, ignore_errors=True) + os.makedirs(tmp_repo_dir) + # look in a directory for zipfiles + zipfiles = os.listdir(zipfile_dir) + for zipfile in zipfiles: + filename_match = repo_commit_regex.search(zipfile) + if not filename_match: + print("Badly named zipfile! 
{}".format(zipfile)) + continue + repo_name, commit_hash, branch = filename_match.groups() + + # unzip + try: + zipfile_path = os.path.join(zipfile_dir, zipfile) + with ZipFile(zipfile_path, 'r') as zip_obj: + zip_obj.extractall(path=tmp_repo_dir) + # we should have made a new directory - find its name + unpacked_zips = os.listdir(tmp_repo_dir) + # should be one and only one directory in here + if len(unpacked_zips) != 1: + raise RuntimeError("Unexpected number of items in unpacked zip directory {}: {}".format(tmp_repo_dir, unpacked_zips)) + unpacked_location = os.path.join(tmp_repo_dir, unpacked_zips[0]) + output_list.append((repo_name, commit_hash, branch, unpacked_location)) + except(BadZipFile): + print("Bad zipfile: {}".format(zipfile)) + continue + return output_list + + + def get_gitlab_config(): + """ + Return a dictionary containing the base URL for the gitlab API, + the API token, the IP address, and the headers to go in any request + """ + home = str(Path.home()) + + with open(f"{home}/.secrets/gitlab-external-ip-address", "r") as f: + ip = f.readlines()[0].strip() + with open(f"{home}/.secrets/gitlab-external-api-token", "r") as f: + token = f.readlines()[0].strip() + + api_url = f"http://{ip}/api/v4/" + headers = {"Authorization": "Bearer " + token} + + return {"api_url": api_url, + "api_token": token, + "ip": ip, + "headers": headers} + + + def get_group_namespace_ids(gitlab_url, gitlab_token, + groups=["approval","unapproved"]): + """ + Find the namespace_id corresponding to the groups we're interested in, + e.g. 'approval' and 'unapproved'. + + Parameters + ========== + gitlab_url: str, base URL for the API + gitlab_token: str, API token for Gitlab + groups: list of string, the group names to look for. 
+ + Returns + ======= + namespace_id_dict: dict, format {: } + + """ + namespaces_url = "{}/namespaces/".format(gitlab_url) + response = requests.get(namespaces_url, + headers = {"Authorization": "Bearer "+gitlab_token}) + if response.status_code != 200: + raise RuntimeError("Bad request: {} {}"\ + .format(response.status_code, response.content)) + gitlab_namespaces = response.json() + namespace_id_dict = {} + for namespace in gitlab_namespaces: + if namespace["kind"] == "group" and namespace["name"] in groups: + namespace_id_dict[namespace["name"]] = namespace["id"] + return namespace_id_dict + + + def get_gitlab_project_list(gitlab_url, gitlab_token): + """ + Get the list of Projects. + + Parameters + ========== + namespace_id: int, ID of the group ("unapproved" or "approval") + gitlab_url: str, base URL for the API + gitlab_token: str, API token. + + Returns + ======= + gitlab_projects: list of dictionaries. + """ + + # list currently existing projects on Gitlab + projects_url = "{}/projects/".format(gitlab_url) + response = requests.get(projects_url, + headers = {"Authorization": "Bearer "+gitlab_token}, + params = {"owned": True, "simple": True}) + + if response.status_code != 200: + raise RuntimeError("Bad request: {} {}"\ + .format(response.status_code, response.content)) + gitlab_projects = response.json() + return gitlab_projects + + + def check_if_project_exists(repo_name, namespace_id, gitlab_url, gitlab_token): + """ + Get a list of projects from the API - check if namespace_id (i.e. group) + and name match. + + Parameters + ========== + repo_name: str, name of our repository/project + namespace_id: int, id of our group ("unapproved" or "approval") + gitlab_url: str, base URL of Gitlab API + gitlab_token: str, API key for Gitlab API. + + Returns + ======= + bool, True if project exists, False otherwise. 
+ """ + projects = get_gitlab_project_list(gitlab_url, gitlab_token) + for project in projects: + if project["name"] == repo_name and \ + project["namespace"]["id"] == namespace_id: + return True + return False + + + def get_project_info(repo_name, namespace_id, gitlab_url, gitlab_token): + """ + Check if project exists, and if so get its ID. Otherwise, create + it and return the ID. + + Parameters + ========== + repo_name: str, name of our repository/project + namespace_id: int, id of our group ("unapproved" or "approval") + gitlab_url: str, base URL of Gitlab API + gitlab_token: str, API key for Gitlab API. + + Returns + ======= + project_info: dict, containing info from the projects API endpoint + """ + already_exists = check_if_project_exists(repo_name, + namespace_id, + gitlab_url, + gitlab_token) + if already_exists: + projects = get_gitlab_project_list(gitlab_url, gitlab_token) + for project_info in projects: + if project_info["name"] == repo_name and \ + project_info["namespace"]["id"] == namespace_id: + return project_info + else: + project_info = create_project(repo_name, + namespace_id, + gitlab_url, + gitlab_token) + return project_info + + + def get_project_remote_url(repo_name, namespace_id, + gitlab_url, gitlab_token): + """ + Given the name of a repository and namespace_id (i.e. group, + "unapproved" or "approval"), either return the remote URL for project + matching the repo name, or create it if it doesn't exist already, + and again return the remote URL. + + Parameters + ========== + repo_name: str, name of the repository/project we're looking for. + namespace_id: int, the ID of the group ("unapproved" or "approval") + gitlab_url: str, base URL of the API + gitlab_token: str, API key + + Returns + ======= + gitlab_project_url: str, the URL to be set as the "remote". 
+ """ + project_info = get_project_info(repo_name, namespace_id, + gitlab_url, gitlab_token) + + return project_info["ssh_url_to_repo"] + + + def get_project_id(repo_name, namespace_id, + gitlab_url, gitlab_token): + """ + Given the name of a repository and namespace_id (i.e. group, + "unapproved" or "approval"), either return the id of project + matching the repo name, or create it if it doesn't exist already, + and again return the id. + + Parameters + ========== + repo_name: str, name of the repository/project we're looking for. + namespace_id: int, the ID of the group ("unapproved" or "approval") + gitlab_url: str, base URL of the API + gitlab_token: str, API key + + Returns + ======= + gitlab_project_url: str, the URL to be set as the "remote". + """ + project_info = get_project_info(repo_name, namespace_id, + gitlab_url, gitlab_token) + + return project_info["id"] + + + def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): + """ + Create empty project on gitlab, and return the corresponding remote URL. + + Parameters + ========== + repo_name: str, name of the repository/project + namespace_id: int, ID of the group ("unapproved" or "approved") + gitlab_url: str, base URL of the API + gitlab_token: str, API token. + + Returns + ======= + gitlab_project_info: dict, containing among other things, the name and + the remote URL for the project. 
+ """ + projects_url = "{}projects/".format(gitlab_url) + response = requests.post(projects_url, + headers = {"Authorization": "Bearer "+gitlab_token}, + data = {"name": repo_name, + "visibility": "public", + "namespace_id": namespace_id} + ) + assert(response.json()["name"] == repo_name) + project_info = response.json() + print("Created project {} in namespace {}, project_id {}".\ + format(repo_name, namespace_id, project_info["id"])) + return project_info + + + def check_if_branch_exists(branch_name, + project_id, + gitlab_url, + gitlab_token): + """ + See if a branch with name branch_name already exists on this Project + + Parameters + ========== + branch_name: str, name of branch to look for + project_id: int, id of the project, obtained from projects API endpoint + gitlab_url: base URL of the Gitlab API + gitlab_token: API token for the Gitlab API + + Returns + ======= + branch_exists: bool, True if branch exists, False if not. + """ + branches_url = "{}/projects/{}/repository/branches".\ + format(gitlab_url, project_id) + response = requests.get(branches_url, + headers={"Authorization": "Bearer "+gitlab_token}) + if response.status_code != 200: + raise RuntimeError("Unable to check for branch {} on project {}: {}".\ + format(branch_name, project_id, r.content)) + branches = response.json() + for branch_info in branches: + if branch_info["name"] == branch_name: + return True + return False + + + + def create_branch(branch_name, + project_id, + gitlab_url, + gitlab_token, + reference_branch="master"): + """ + Create a new branch on an existing project. By default, use 'master' + as the reference branch from which to create the new one. + + Parameters + ========== + branch_name: str, the desired name of the new branch + project_id: int, the ID of the project, which is the "id" value in + the dictionary of project information returned when + creating a new project or listing existing ones. 
+ gitlab_url: str, the base URL for the Gitlab API + gitlab_token: str, the Gitlab API token + + Returns + ======= + branch_info: dict, info about the branch from API endpoint + """ + # assume branch doesn't already exist - create it! + branch_url = "{}/projects/{}/repository/branches".format(gitlab_url, project_id) + response = requests.post(branch_url, + headers = {"Authorization": "Bearer "+gitlab_token}, + data = {"branch": branch_name, "ref": reference_branch}) + if response.status_code != 201: + raise RuntimeError("Problem creating branch {}: {}".format(branch_name, + response.content)) + branch_info = response.json() + assert branch_info["name"] == branch_name + return branch_info + + + def check_if_merge_request_exists(repo_name, + source_project_id, + source_branch, + target_project_id, + target_branch, + gitlab_url, gitlab_token): + """ + See if there is an existing merge request between the source and target + project/branch combinations. + + Parameters + ========== + repo_name: str, name of the repository + source_project_id: int, project_id for the unapproved project, obtainable + as the "ID" field of the json returned from the + projects API endpoint. + source_branch: str, name of the branch on source project, will typically + be the commit_hash from the original repo. + target_project_id: int, project_id for the "approval" group's project. + target_branch: str, name of branch on target project, will typically + be the desired branch name. + gitlab_url: str, base URL for the Gitlab API + gitlab_token: str, API token for the Gitlab API. 
+ + Returns + ======= + bool, True if merge request already exists, False otherwise + """ + mr_url = "{}/projects/{}/merge_requests".format(gitlab_url, source_project_id) + response = requests.get(mr_url, + headers = {"Authorization": "Bearer "+gitlab_token}) + if response.status_code != 200: + raise RuntimeError("Request to check existence of MR failed: {} {}".\ + format(response.status_code, response.content)) + for mr in response.json(): + if mr["source_branch"] == source_branch and \ + mr["target_branch"] == target_branch: + print("Merge request {} -> {} already exists".\ + format(source_branch, target_branch)) + return True + return False + + + def create_merge_request(repo_name, + source_project_id, + source_branch, + target_project_id, + target_branch, + gitlab_url, gitlab_token): + + """ + Create a new MR, e.g. from the branch in the "unapproved" + group's project, to the branch in the "approval" + group's project. + + Parameters + ========== + repo_name: str, name of the repository + source_project_id: int, project_id for the unapproved project, obtainable + as the "ID" field of the json returned from the + projects API endpoint. + source_branch: str, name of the branch on source project, will typically + be the commit_hash from the original repo. + target_project_id: int, project_id for the "approval" group's project. + target_branch: str, name of branch on target project, will typically + be the desired branch name. + gitlab_url: str, base URL for the Gitlab API + gitlab_token: str, API token for the Gitlab API. 
+ + Returns + ======= + mr_info: dict, the response from the API upon creating the Merge Request + """ + # first need to create a forked-from relationship between the projects + fork_url = "{}/projects/{}/fork/{}".format(gitlab_url, + source_project_id, + target_project_id) + response = requests.post(fork_url, + headers = {"Authorization": "Bearer "+gitlab_token}) + # status code 201 if fork relationship created, or 409 if already there + if (response.status_code != 201) and (response.status_code != 409): + raise RuntimeError("Unable to create fork relationship: {} {}".\ + format(response.status_code, response.content)) + + mr_url = "{}/projects/{}/merge_requests".format(gitlab_url, source_project_id) + title = "{}: {} to {}".format(repo_name, source_branch, target_branch) + response = requests.post(mr_url, + headers = {"Authorization": "Bearer "+gitlab_token}, + data = {"source_branch": source_branch, + "target_branch": target_branch, + "target_project_id": target_project_id, + "title": title}) + if response.status_code != 201: + raise RuntimeError("Problem creating Merge Request {} {} {}: {}"\ + .format(repo_name, source_branch,target_branch, + response.content)) + mr_info = response.json() + return mr_info + + + + + + def push_to_remote(path_to_unzipped_repo, commit_hash, remote_url): + """ + Run shell commands to convert the unzipped directory containing the + repository contents into a git repo, then commit it to a branch named + as the commit_hash. + + Parameters + ========== + path_to_unzipped_repo: str, the full directory path to the unzipped repo + commit_hash: str, original commit hash from the external git repo, will + be used as the name of the branch to push to + remote_url: str, the URL for this project on gitlab-external to be added + as a "remote". 
+ """ + subprocess.run(["git","init"], cwd=path_to_unzipped_repo, check=True) + # Create a branch named after the original commit hash + subprocess.run(["git","checkout","-b",commit_hash], + cwd=path_to_unzipped_repo, check=True) + # Commit everything to this branch, also putting commit hash into message + subprocess.run(["git","add","."], cwd=path_to_unzipped_repo, check=True) + subprocess.run(["git","commit","-m",commit_hash], + cwd=path_to_unzipped_repo, check=True) + # add the remote_url as a remote called 'gitlab-external' + subprocess.run(["git","remote","add","gitlab-external",remote_url], + cwd=path_to_unzipped_repo, check=True) + # Push to gitlab external + subprocess.run(["git","push","--force","--all","gitlab-external"], + cwd=path_to_unzipped_repo, check=True) + + + def create_and_push_unapproved_project(repo_name, + namespace_id, + gitlab_url, + gitlab_token, + path_to_unzipped_repo, + commit_hash): + """ + We have unzipped a zipfile, and put the contents (i.e. the code we want + to push) in path_to_unzipped_project. + Now we create the project in the "unapproved" group on Gitlab, and push + to it. 
+ + Parameters + ========== + repo_name: str, name of our repository/project + gitlab_url: str, the base URL of Gitlab API + gitlab_token: str, API token for Gitlab API + path_to_unzipped_repo: str, full directory path to code we want to commit + commit_hash: str, the commit hash from the original repo, to be used as + the name of the branch we'll push to + + Returns + ======= + project_id: int, ID of the project as returned by projects API endpoint + """ + # Get project ID - project will be created if it didn't already exist + project_id = get_project_id(repo_name, namespace_id, gitlab_url, gitlab_token) + assert project_id + # see if branch already exists with name=commit_hash + branch_exists = check_if_branch_exists(commit_hash, + project_id, + gitlab_url, + gitlab_token) + if branch_exists: + print("Branch {} already exists".format(commit_hash)) + # already exists - do nothing + return project_id + # otherwise we need to commit code to it and push + remote_url = get_project_remote_url(repo_name, namespace_id, + gitlab_url, gitlab_token) + print("remote URL for {} is {}".format(repo_name, remote_url)) + + push_to_remote(path_to_unzipped_repo, commit_hash, remote_url) + # Return the project_id, to use in merge request + return project_id + + + def create_approved_project_branch(repo_name, + branch_name, + namespace_id, + gitlab_url, + gitlab_token): + """ + Create a new branch (and a new project if it doesn't already exist) + owned by the "approval" group. This will be the target for the merge + request. + + Parameters + ========== + repo_name: str, repository name + gitlab_url: str, base URL for Gitlab API + gitlab_token: str, API token for Gitlab API + branch_name: str, the desired branch name. 
+ + Returns + ======= + project_id: int, the "ID" field in the info from projects API endpoint + """ + # get the project ID - project will be created if it doesn't already exist + project_id = get_project_id(repo_name, namespace_id, gitlab_url, gitlab_token) + assert project_id + + # create the branch if it doesn't already exist + branch_exists = check_if_branch_exists(branch_name, + project_id, + gitlab_url, + gitlab_token) + if not branch_exists: + branch_info = create_branch(branch_name, + project_id, + gitlab_url, + gitlab_token) + assert branch_info["name"] == branch_name + # return the ID of this project so we can use it in merge request + return project_id + + + def main(): + # create a directory to unpack the zipfiles into + TMP_REPO_DIR = "/tmp/repos" + os.makedirs(TMP_REPO_DIR, exist_ok=True) + # get the gitlab config + config = get_gitlab_config() + ZIPFILE_DIR = "/zfiles" + + # unzip the zipfiles, and retrieve a list of tuples describing + # (repo_name, commit_hash, desired_branch, unzipped_location) + unzipped_repos = unzip_zipfiles(ZIPFILE_DIR, TMP_REPO_DIR) + + # get the namespace_ids of our "approval" and "unapproved" groups + GROUPS = ["unapproved","approval"] + namespace_ids = get_group_namespace_ids(config["api_url"], + config["api_token"], + GROUPS) + + # loop over all our newly unzipped repositories + for repo in unzipped_repos: + # unpack tuple + repo_name, commit_hash, branch_name, location = repo + print("Unpacked {} {} {}".format(repo_name, commit_hash, branch_name)) + src_project_id = create_and_push_unapproved_project(repo_name, + namespace_ids[GROUPS[0]], + config["api_url"], + config["api_token"], + location, + commit_hash) + print("Created project {}/{} branch {}".\ + format(GROUPS[0],repo_name, commit_hash)) + + # create project and branch on approved repo + target_project_id = create_approved_project_branch(repo_name, + branch_name, + namespace_ids[GROUPS[1]], + config["api_url"], + config["api_token"]) + print("Created project {}/{} 
branch {}".\ + format(GROUPS[1],repo_name, branch_name)) + + mr_exists = check_if_merge_request_exists(repo_name, + src_project_id, + commit_hash, + target_project_id, + branch_name, + config["api_url"], + config["api_token"]) + if mr_exists: + print("Merge request {} -> {} already exists. skipping".\ + format(commit_hash, branch_name)) + else: + # create merge request + create_merge_request(repo_name, + src_project_id, + commit_hash, + target_project_id, + branch_name, + config["api_url"], + config["api_token"]) + print("Created merge request {} -> {}".\ + format(commit_hash, branch_name)) + + if __name__ == "__main__": + main() + # Script for monitoring and accepting approval merge requests - path: "/home//check_merge_requests.py" permissions: "0755" @@ -434,7 +1081,7 @@ runcmd: chown : "/home//.ssh/known_hosts" # -------------------------------- # WAIT FOR GITLAB EXTERNAL HEALTH CHECK - # -------------------------------- + # -------------------------------- - | attempt_counter=0 max_attempts=60 @@ -470,7 +1117,7 @@ runcmd: # Create groups for storing unapproved and approval repos - | curl --header "Authorization: Bearer " --data "name=approval&path=approval&visibility=public" /api/v4/groups; - curl --header "Authorization: Bearer " --data "name=unapproved&path=unapproved&visibility=public" /api/v4/groups + curl --header "Authorization: Bearer " --data "name=unapproved&path=unapproved&visibility=public" /api/v4/groups # -------------------------------- # FINAL SETUP # -------------------------------- From e3b99ac7b72576c897fa4f5bfa2edb6e50c79ee3 Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Wed, 6 May 2020 17:47:28 +0100 Subject: [PATCH 028/155] don't write curl output to file --- .../administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 index 
d69b8a1b91..1c4ec42a70 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 @@ -97,7 +97,7 @@ Add-LogMessage -Level Info "Got SAS token and URL $remoteUrl" $script = @" #!/bin/bash mkdir -p /zfiles -curl -X GET -o /zfiles/${zipFileName} "${remoteUrl}" >& /zfiles/curl.log +curl -X GET -o /zfiles/${zipFileName} "${remoteUrl}" "@ Add-LogMessage -Level Info "[ ] Running remote script to download zipfile onto $gitlabExternalVmName" From d995c471a820e8decd794a2d72edf9b2f1ee9654 Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Wed, 6 May 2020 17:48:26 +0100 Subject: [PATCH 029/155] add container name for blob storage for zipfiles from git repos --- deployment/common/Configuration.psm1 | 1 + environment_configs/full/sre_testasandbox_full_config.json | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/deployment/common/Configuration.psm1 b/deployment/common/Configuration.psm1 index 4ed8e7611c..1fa6e3a738 100644 --- a/deployment/common/Configuration.psm1 +++ b/deployment/common/Configuration.psm1 @@ -375,6 +375,7 @@ function Add-SreConfig { artifacts = [ordered]@{ rg = $storageRg accountName = "sre$($shm.id)artifacts${storageSuffix}".ToLower() | TrimToLength 24 + gitlabAirlockContainerName = "sre$($shm.id)gitlabairlock${storageSuffix}" } bootdiagnostics = [ordered]@{ rg = $storageRg diff --git a/environment_configs/full/sre_testasandbox_full_config.json b/environment_configs/full/sre_testasandbox_full_config.json index d1050c5b92..fcb5040bf9 100644 --- a/environment_configs/full/sre_testasandbox_full_config.json +++ b/environment_configs/full/sre_testasandbox_full_config.json @@ -207,7 +207,8 @@ "storage": { "artifacts": { "rg": "RG_SRE_ARTIFACTS", - "accountName": "sreartifactszvmwtqkigrld" + "accountName": "sreartifactszvmwtqkigrad", + "gitlabAirlockContainerName": "gitlabairlock" }, "bootdiagnostics": { "rg": "RG_SRE_ARTIFACTS", From 
c831b0e1031358541e7822998c12d6098e26ab48 Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Thu, 7 May 2020 09:07:54 +0100 Subject: [PATCH 030/155] Move python script content out of cloud-init yaml file into scripts/ directory --- .../cloud-init-gitlab-external.template.yaml | 919 +----------------- .../scripts/check_merge_requests.py | 274 ++++++ .../scripts/zipfile_to_gitlab_project.py | 663 +++++++++++++ 3 files changed, 939 insertions(+), 917 deletions(-) create mode 100644 deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py create mode 100644 deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index bf026b3366..d4552004f7 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -90,927 +90,12 @@ write_files: - path: "/home//zipfile_to_gitlab_project.py" permissions: "0755" content: | - import os - import shutil - import re - import requests - import subprocess - from zipfile import ZipFile, BadZipFile - from urllib.parse import quote as url_quote - from pathlib import Path - import logging - from logging.handlers import RotatingFileHandler - - logger = logging.getLogger("project_upload_logger") - logger.setLevel(logging.INFO) - formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s") - f_handler = RotatingFileHandler( - "upload_zipfiles_to_projects.log", maxBytes=5 * 1024 * 1024, backupCount=10 - ) - f_handler.setFormatter(formatter) - c_handler = logging.StreamHandler() - c_handler.setFormatter(formatter) - logger.addHandler(f_handler) - logger.addHandler(c_handler) - - - def unzip_zipfiles(zipfile_dir, tmp_repo_dir): - """ - Parameters - 
========== - zipfile_dir: str, path to directory containing zipfiles - tmp_repo_dir: str, path to directory where zipfiles will be unzipped - - Returns - ======= - output_list: list of tuples - [(repo_name, commit_hash, desired_branch, unzipped-path),...] - - Note that the convention for the zipfile filenames is - __.zip - """ - output_list = [] - repo_commit_regex = re.compile("([-\w]+)_([a-f\d]+)_([\S]+).zip") - # tear down and recreate the directory where we will put the unpacked zip - shutil.rmtree(tmp_repo_dir, ignore_errors=True) - os.makedirs(tmp_repo_dir) - # look in a directory for zipfiles - zipfiles = os.listdir(zipfile_dir) - for zipfile in zipfiles: - filename_match = repo_commit_regex.search(zipfile) - if not filename_match: - print("Badly named zipfile! {}".format(zipfile)) - continue - repo_name, commit_hash, branch = filename_match.groups() - - # unzip - try: - zipfile_path = os.path.join(zipfile_dir, zipfile) - with ZipFile(zipfile_path, 'r') as zip_obj: - zip_obj.extractall(path=tmp_repo_dir) - # we should have made a new directory - find its name - unpacked_zips = os.listdir(tmp_repo_dir) - # should be one and only one directory in here - if len(unpacked_zips) != 1: - raise RuntimeError("Unexpected number of items in unpacked zip directory {}: {}".format(tmp_repo_dir, unpacked_zips)) - unpacked_location = os.path.join(tmp_repo_dir, unpacked_zips[0]) - output_list.append((repo_name, commit_hash, branch, unpacked_location)) - except(BadZipFile): - print("Bad zipfile: {}".format(zipfile)) - continue - return output_list - - - def get_gitlab_config(): - """ - Return a dictionary containing the base URL for the gitlab API, - the API token, the IP address, and the headers to go in any request - """ - home = str(Path.home()) - - with open(f"{home}/.secrets/gitlab-external-ip-address", "r") as f: - ip = f.readlines()[0].strip() - with open(f"{home}/.secrets/gitlab-external-api-token", "r") as f: - token = f.readlines()[0].strip() - - api_url = 
f"http://{ip}/api/v4/" - headers = {"Authorization": "Bearer " + token} - - return {"api_url": api_url, - "api_token": token, - "ip": ip, - "headers": headers} - - - def get_group_namespace_ids(gitlab_url, gitlab_token, - groups=["approval","unapproved"]): - """ - Find the namespace_id corresponding to the groups we're interested in, - e.g. 'approval' and 'unapproved'. - - Parameters - ========== - gitlab_url: str, base URL for the API - gitlab_token: str, API token for Gitlab - groups: list of string, the group names to look for. - - Returns - ======= - namespace_id_dict: dict, format {: } - - """ - namespaces_url = "{}/namespaces/".format(gitlab_url) - response = requests.get(namespaces_url, - headers = {"Authorization": "Bearer "+gitlab_token}) - if response.status_code != 200: - raise RuntimeError("Bad request: {} {}"\ - .format(response.status_code, response.content)) - gitlab_namespaces = response.json() - namespace_id_dict = {} - for namespace in gitlab_namespaces: - if namespace["kind"] == "group" and namespace["name"] in groups: - namespace_id_dict[namespace["name"]] = namespace["id"] - return namespace_id_dict - - - def get_gitlab_project_list(gitlab_url, gitlab_token): - """ - Get the list of Projects. - - Parameters - ========== - namespace_id: int, ID of the group ("unapproved" or "approval") - gitlab_url: str, base URL for the API - gitlab_token: str, API token. - - Returns - ======= - gitlab_projects: list of dictionaries. 
- """ - - # list currently existing projects on Gitlab - projects_url = "{}/projects/".format(gitlab_url) - response = requests.get(projects_url, - headers = {"Authorization": "Bearer "+gitlab_token}, - params = {"owned": True, "simple": True}) - - if response.status_code != 200: - raise RuntimeError("Bad request: {} {}"\ - .format(response.status_code, response.content)) - gitlab_projects = response.json() - return gitlab_projects - - - def check_if_project_exists(repo_name, namespace_id, gitlab_url, gitlab_token): - """ - Get a list of projects from the API - check if namespace_id (i.e. group) - and name match. - - Parameters - ========== - repo_name: str, name of our repository/project - namespace_id: int, id of our group ("unapproved" or "approval") - gitlab_url: str, base URL of Gitlab API - gitlab_token: str, API key for Gitlab API. - - Returns - ======= - bool, True if project exists, False otherwise. - """ - projects = get_gitlab_project_list(gitlab_url, gitlab_token) - for project in projects: - if project["name"] == repo_name and \ - project["namespace"]["id"] == namespace_id: - return True - return False - - - def get_project_info(repo_name, namespace_id, gitlab_url, gitlab_token): - """ - Check if project exists, and if so get its ID. Otherwise, create - it and return the ID. - - Parameters - ========== - repo_name: str, name of our repository/project - namespace_id: int, id of our group ("unapproved" or "approval") - gitlab_url: str, base URL of Gitlab API - gitlab_token: str, API key for Gitlab API. 
- - Returns - ======= - project_info: dict, containing info from the projects API endpoint - """ - already_exists = check_if_project_exists(repo_name, - namespace_id, - gitlab_url, - gitlab_token) - if already_exists: - projects = get_gitlab_project_list(gitlab_url, gitlab_token) - for project_info in projects: - if project_info["name"] == repo_name and \ - project_info["namespace"]["id"] == namespace_id: - return project_info - else: - project_info = create_project(repo_name, - namespace_id, - gitlab_url, - gitlab_token) - return project_info - - - def get_project_remote_url(repo_name, namespace_id, - gitlab_url, gitlab_token): - """ - Given the name of a repository and namespace_id (i.e. group, - "unapproved" or "approval"), either return the remote URL for project - matching the repo name, or create it if it doesn't exist already, - and again return the remote URL. - - Parameters - ========== - repo_name: str, name of the repository/project we're looking for. - namespace_id: int, the ID of the group ("unapproved" or "approval") - gitlab_url: str, base URL of the API - gitlab_token: str, API key - - Returns - ======= - gitlab_project_url: str, the URL to be set as the "remote". - """ - project_info = get_project_info(repo_name, namespace_id, - gitlab_url, gitlab_token) - - return project_info["ssh_url_to_repo"] - - - def get_project_id(repo_name, namespace_id, - gitlab_url, gitlab_token): - """ - Given the name of a repository and namespace_id (i.e. group, - "unapproved" or "approval"), either return the id of project - matching the repo name, or create it if it doesn't exist already, - and again return the id. - - Parameters - ========== - repo_name: str, name of the repository/project we're looking for. - namespace_id: int, the ID of the group ("unapproved" or "approval") - gitlab_url: str, base URL of the API - gitlab_token: str, API key - - Returns - ======= - gitlab_project_url: str, the URL to be set as the "remote". 
- """ - project_info = get_project_info(repo_name, namespace_id, - gitlab_url, gitlab_token) - - return project_info["id"] - - - def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): - """ - Create empty project on gitlab, and return the corresponding remote URL. - - Parameters - ========== - repo_name: str, name of the repository/project - namespace_id: int, ID of the group ("unapproved" or "approved") - gitlab_url: str, base URL of the API - gitlab_token: str, API token. - - Returns - ======= - gitlab_project_info: dict, containing among other things, the name and - the remote URL for the project. - """ - projects_url = "{}projects/".format(gitlab_url) - response = requests.post(projects_url, - headers = {"Authorization": "Bearer "+gitlab_token}, - data = {"name": repo_name, - "visibility": "public", - "namespace_id": namespace_id} - ) - assert(response.json()["name"] == repo_name) - project_info = response.json() - print("Created project {} in namespace {}, project_id {}".\ - format(repo_name, namespace_id, project_info["id"])) - return project_info - - - def check_if_branch_exists(branch_name, - project_id, - gitlab_url, - gitlab_token): - """ - See if a branch with name branch_name already exists on this Project - - Parameters - ========== - branch_name: str, name of branch to look for - project_id: int, id of the project, obtained from projects API endpoint - gitlab_url: base URL of the Gitlab API - gitlab_token: API token for the Gitlab API - - Returns - ======= - branch_exists: bool, True if branch exists, False if not. 
- """ - branches_url = "{}/projects/{}/repository/branches".\ - format(gitlab_url, project_id) - response = requests.get(branches_url, - headers={"Authorization": "Bearer "+gitlab_token}) - if response.status_code != 200: - raise RuntimeError("Unable to check for branch {} on project {}: {}".\ - format(branch_name, project_id, r.content)) - branches = response.json() - for branch_info in branches: - if branch_info["name"] == branch_name: - return True - return False - - - - def create_branch(branch_name, - project_id, - gitlab_url, - gitlab_token, - reference_branch="master"): - """ - Create a new branch on an existing project. By default, use 'master' - as the reference branch from which to create the new one. - - Parameters - ========== - branch_name: str, the desired name of the new branch - project_id: int, the ID of the project, which is the "id" value in - the dictionary of project information returned when - creating a new project or listing existing ones. - gitlab_url: str, the base URL for the Gitlab API - gitlab_token: str, the Gitlab API token - - Returns - ======= - branch_info: dict, info about the branch from API endpoint - """ - # assume branch doesn't already exist - create it! - branch_url = "{}/projects/{}/repository/branches".format(gitlab_url, project_id) - response = requests.post(branch_url, - headers = {"Authorization": "Bearer "+gitlab_token}, - data = {"branch": branch_name, "ref": reference_branch}) - if response.status_code != 201: - raise RuntimeError("Problem creating branch {}: {}".format(branch_name, - response.content)) - branch_info = response.json() - assert branch_info["name"] == branch_name - return branch_info - - - def check_if_merge_request_exists(repo_name, - source_project_id, - source_branch, - target_project_id, - target_branch, - gitlab_url, gitlab_token): - """ - See if there is an existing merge request between the source and target - project/branch combinations. 
- - Parameters - ========== - repo_name: str, name of the repository - source_project_id: int, project_id for the unapproved project, obtainable - as the "ID" field of the json returned from the - projects API endpoint. - source_branch: str, name of the branch on source project, will typically - be the commit_hash from the original repo. - target_project_id: int, project_id for the "approval" group's project. - target_branch: str, name of branch on target project, will typically - be the desired branch name. - gitlab_url: str, base URL for the Gitlab API - gitlab_token: str, API token for the Gitlab API. - - Returns - ======= - bool, True if merge request already exists, False otherwise - """ - mr_url = "{}/projects/{}/merge_requests".format(gitlab_url, source_project_id) - response = requests.get(mr_url, - headers = {"Authorization": "Bearer "+gitlab_token}) - if response.status_code != 200: - raise RuntimeError("Request to check existence of MR failed: {} {}".\ - format(response.status_code, response.content)) - for mr in response.json(): - if mr["source_branch"] == source_branch and \ - mr["target_branch"] == target_branch: - print("Merge request {} -> {} already exists".\ - format(source_branch, target_branch)) - return True - return False - - - def create_merge_request(repo_name, - source_project_id, - source_branch, - target_project_id, - target_branch, - gitlab_url, gitlab_token): - - """ - Create a new MR, e.g. from the branch in the "unapproved" - group's project, to the branch in the "approval" - group's project. - - Parameters - ========== - repo_name: str, name of the repository - source_project_id: int, project_id for the unapproved project, obtainable - as the "ID" field of the json returned from the - projects API endpoint. - source_branch: str, name of the branch on source project, will typically - be the commit_hash from the original repo. - target_project_id: int, project_id for the "approval" group's project. 
- target_branch: str, name of branch on target project, will typically - be the desired branch name. - gitlab_url: str, base URL for the Gitlab API - gitlab_token: str, API token for the Gitlab API. - - Returns - ======= - mr_info: dict, the response from the API upon creating the Merge Request - """ - # first need to create a forked-from relationship between the projects - fork_url = "{}/projects/{}/fork/{}".format(gitlab_url, - source_project_id, - target_project_id) - response = requests.post(fork_url, - headers = {"Authorization": "Bearer "+gitlab_token}) - # status code 201 if fork relationship created, or 409 if already there - if (response.status_code != 201) and (response.status_code != 409): - raise RuntimeError("Unable to create fork relationship: {} {}".\ - format(response.status_code, response.content)) - - mr_url = "{}/projects/{}/merge_requests".format(gitlab_url, source_project_id) - title = "{}: {} to {}".format(repo_name, source_branch, target_branch) - response = requests.post(mr_url, - headers = {"Authorization": "Bearer "+gitlab_token}, - data = {"source_branch": source_branch, - "target_branch": target_branch, - "target_project_id": target_project_id, - "title": title}) - if response.status_code != 201: - raise RuntimeError("Problem creating Merge Request {} {} {}: {}"\ - .format(repo_name, source_branch,target_branch, - response.content)) - mr_info = response.json() - return mr_info - - - - - - def push_to_remote(path_to_unzipped_repo, commit_hash, remote_url): - """ - Run shell commands to convert the unzipped directory containing the - repository contents into a git repo, then commit it to a branch named - as the commit_hash. - - Parameters - ========== - path_to_unzipped_repo: str, the full directory path to the unzipped repo - commit_hash: str, original commit hash from the external git repo, will - be used as the name of the branch to push to - remote_url: str, the URL for this project on gitlab-external to be added - as a "remote". 
- """ - subprocess.run(["git","init"], cwd=path_to_unzipped_repo, check=True) - # Create a branch named after the original commit hash - subprocess.run(["git","checkout","-b",commit_hash], - cwd=path_to_unzipped_repo, check=True) - # Commit everything to this branch, also putting commit hash into message - subprocess.run(["git","add","."], cwd=path_to_unzipped_repo, check=True) - subprocess.run(["git","commit","-m",commit_hash], - cwd=path_to_unzipped_repo, check=True) - # add the remote_url as a remote called 'gitlab-external' - subprocess.run(["git","remote","add","gitlab-external",remote_url], - cwd=path_to_unzipped_repo, check=True) - # Push to gitlab external - subprocess.run(["git","push","--force","--all","gitlab-external"], - cwd=path_to_unzipped_repo, check=True) - - - def create_and_push_unapproved_project(repo_name, - namespace_id, - gitlab_url, - gitlab_token, - path_to_unzipped_repo, - commit_hash): - """ - We have unzipped a zipfile, and put the contents (i.e. the code we want - to push) in path_to_unzipped_project. - Now we create the project in the "unapproved" group on Gitlab, and push - to it. 
- - Parameters - ========== - repo_name: str, name of our repository/project - gitlab_url: str, the base URL of Gitlab API - gitlab_token: str, API token for Gitlab API - path_to_unzipped_repo: str, full directory path to code we want to commit - commit_hash: str, the commit hash from the original repo, to be used as - the name of the branch we'll push to - - Returns - ======= - project_id: int, ID of the project as returned by projects API endpoint - """ - # Get project ID - project will be created if it didn't already exist - project_id = get_project_id(repo_name, namespace_id, gitlab_url, gitlab_token) - assert project_id - # see if branch already exists with name=commit_hash - branch_exists = check_if_branch_exists(commit_hash, - project_id, - gitlab_url, - gitlab_token) - if branch_exists: - print("Branch {} already exists".format(commit_hash)) - # already exists - do nothing - return project_id - # otherwise we need to commit code to it and push - remote_url = get_project_remote_url(repo_name, namespace_id, - gitlab_url, gitlab_token) - print("remote URL for {} is {}".format(repo_name, remote_url)) - - push_to_remote(path_to_unzipped_repo, commit_hash, remote_url) - # Return the project_id, to use in merge request - return project_id - - - def create_approved_project_branch(repo_name, - branch_name, - namespace_id, - gitlab_url, - gitlab_token): - """ - Create a new branch (and a new project if it doesn't already exist) - owned by the "approval" group. This will be the target for the merge - request. - - Parameters - ========== - repo_name: str, repository name - gitlab_url: str, base URL for Gitlab API - gitlab_token: str, API token for Gitlab API - branch_name: str, the desired branch name. 
- - Returns - ======= - project_id: int, the "ID" field in the info from projects API endpoint - """ - # get the project ID - project will be created if it doesn't already exist - project_id = get_project_id(repo_name, namespace_id, gitlab_url, gitlab_token) - assert project_id - - # create the branch if it doesn't already exist - branch_exists = check_if_branch_exists(branch_name, - project_id, - gitlab_url, - gitlab_token) - if not branch_exists: - branch_info = create_branch(branch_name, - project_id, - gitlab_url, - gitlab_token) - assert branch_info["name"] == branch_name - # return the ID of this project so we can use it in merge request - return project_id - - - def main(): - # create a directory to unpack the zipfiles into - TMP_REPO_DIR = "/tmp/repos" - os.makedirs(TMP_REPO_DIR, exist_ok=True) - # get the gitlab config - config = get_gitlab_config() - ZIPFILE_DIR = "/zfiles" - - # unzip the zipfiles, and retrieve a list of tuples describing - # (repo_name, commit_hash, desired_branch, unzipped_location) - unzipped_repos = unzip_zipfiles(ZIPFILE_DIR, TMP_REPO_DIR) - - # get the namespace_ids of our "approval" and "unapproved" groups - GROUPS = ["unapproved","approval"] - namespace_ids = get_group_namespace_ids(config["api_url"], - config["api_token"], - GROUPS) - - # loop over all our newly unzipped repositories - for repo in unzipped_repos: - # unpack tuple - repo_name, commit_hash, branch_name, location = repo - print("Unpacked {} {} {}".format(repo_name, commit_hash, branch_name)) - src_project_id = create_and_push_unapproved_project(repo_name, - namespace_ids[GROUPS[0]], - config["api_url"], - config["api_token"], - location, - commit_hash) - print("Created project {}/{} branch {}".\ - format(GROUPS[0],repo_name, commit_hash)) - - # create project and branch on approved repo - target_project_id = create_approved_project_branch(repo_name, - branch_name, - namespace_ids[GROUPS[1]], - config["api_url"], - config["api_token"]) - print("Created project {}/{} 
branch {}".\ - format(GROUPS[1],repo_name, branch_name)) - - mr_exists = check_if_merge_request_exists(repo_name, - src_project_id, - commit_hash, - target_project_id, - branch_name, - config["api_url"], - config["api_token"]) - if mr_exists: - print("Merge request {} -> {} already exists. skipping".\ - format(commit_hash, branch_name)) - else: - # create merge request - create_merge_request(repo_name, - src_project_id, - commit_hash, - target_project_id, - branch_name, - config["api_url"], - config["api_token"]) - print("Created merge request {} -> {}".\ - format(commit_hash, branch_name)) - - if __name__ == "__main__": - main() - + # Script for monitoring and accepting approval merge requests - path: "/home//check_merge_requests.py" permissions: "0755" content: | - import requests - import subprocess - from urllib.parse import quote as url_quote - from pathlib import Path - import logging - from logging.handlers import RotatingFileHandler - - logger = logging.getLogger("merge_requests_logger") - logger.setLevel(logging.INFO) - formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s") - f_handler = RotatingFileHandler( - "check_merge_requests.log", maxBytes=5 * 1024 * 1024, backupCount=10 - ) - f_handler.setFormatter(formatter) - c_handler = logging.StreamHandler() - c_handler.setFormatter(formatter) - logger.addHandler(f_handler) - logger.addHandler(c_handler) - - global HOME - global GL_INTERNAL_IP - global GL_INTERNAL_TOKEN - global GL_INTERNAL_AUTH_HEADER - - HOME = str(Path.home()) - - with open(f"{HOME}/.secrets/gitlab-internal-ip-address", "r") as f: - GL_INTERNAL_IP = f.readlines()[0].strip() - - GL_INTERNAL_URL = "http://" + GL_INTERNAL_IP + "/api/v4" - - with open(f"{HOME}/.secrets/gitlab-internal-api-token", "r") as f: - GL_INTERNAL_TOKEN = f.readlines()[0].strip() - - GL_INTERNAL_AUTH_HEADER = {"Authorization": "Bearer " + GL_INTERNAL_TOKEN} - - - def internal_project_exists(repo_name): - """Given a string (the name of a repo - not a 
URL), returns a pair - (exists, url): - - exists: boolean - does repo_name exist on GITLAB-INTERNAL? - - url: str - the ssh url to the repo (when 'exists' is true) - """ - - # build url-encoded repo_name - repo_path_encoded = url_quote("ingress/" + repo_name, safe='') - - # Does repo_name exist on GITLAB-INTERNAL? - response = requests.get(GL_INTERNAL_URL + '/projects/' + repo_path_encoded, - headers=GL_INTERNAL_AUTH_HEADER) - - if response.status_code == 404: - return (False, "") - elif response.status_code == 200: - return (True, response.json()["ssh_url_to_repo"]) - else: - # Not using `response.raise_for_status()`, since we also want - # to raise an exception on unexpected "successful" responses - # (not 200) - raise requests.HTTPError("Unexpected response: " + response.reason - + ", content: " + response.text) - - - def internal_update_repo(gh_url, repo_name): - """Takes a GitHub URL, `gh_url`, which should be the URL to the - "APPROVED" repo, clones it and pushes all branches to the repo - `repo_name` owned by 'ingress' on GITLAB-INTERNAL, creating it - there first if it doesn't exist. 
- """ - # clone the repo from gh_url (on GITLAB-EXTERNAL), removing any of - # the same name first (simpler than checking if it exists, has the - # same remote and pulling) - subprocess.run(["rm", "-rf", repo_name], check=True) - subprocess.run(["git", "clone", gh_url, repo_name], check=True) - - project_exists, gl_internal_repo_url = internal_project_exists(repo_name) - - # create the project if it doesn't exist - if not project_exists: - print("Creating: " + repo_name) - response = requests.post(GL_INTERNAL_URL + '/projects', - headers=GL_INTERNAL_AUTH_HEADER, - data={"name": repo_name, - "visibility": "public"}) - response.raise_for_status() - assert(response.json()["path_with_namespace"] == "ingress/" + repo_name) - - gl_internal_repo_url = response.json()["ssh_url_to_repo"] - - # Set the remote - subprocess.run(["git", "remote", "add", "gitlab-internal", - gl_internal_repo_url], cwd=repo_name, check=True) - - # Force push current contents of all branches - subprocess.run(["git", "push", "--force", "--all", - "gitlab-internal"], cwd=repo_name, check=True) - - - def get_request(endpoint, headers, params=None): - r = requests.get(endpoint, headers=headers, params=params) - if r.ok: - return r.json() - else: - raise ValueError( - f"Request failed: URL {endpoint}, CODE {r.status_code}, CONTENT {r.content}" - ) - - - def put_request(endpoint, headers, params=None): - r = requests.put(endpoint, headers=headers, params=params) - if r.ok: - return r.json() - else: - raise ValueError( - f"Request failed: URL {endpoint}, CODE {r.status_code}, CONTENT {r.content}" - ) - - - def get_gitlab_config(server="external"): - home = str(Path.home()) - - if server == "external": - with open(f"{home}/.secrets/gitlab-external-ip-address", "r") as f: - ip = f.readlines()[0].strip() - with open(f"{home}/.secrets/gitlab-external-api-token", "r") as f: - token = f.readlines()[0].strip() - elif server == "internal": - with open(f"{home}/.secrets/gitlab-internal-ip-address", "r") as f: - 
ip = f.readlines()[0].strip() - with open(f"{home}/.secrets/gitlab-internal-api-token", "r") as f: - token = f.readlines()[0].strip() - else: - raise ValueError("Server must be external or internal") - - api_url = f"http://{ip}/api/v4/" - headers = {"Authorization": "Bearer " + token} - - return {"api_url": api_url, "api_token": token, "ip": ip, "headers": headers} - - - def get_group_id(group_name, config): - endpoint = config["api_url"] + "groups" - response = get_request(endpoint, headers=config["headers"]) - for group in response: - if group["name"] == group_name: - return group["id"] - raise ValueError(f"{group_name} not found in groups.") - - - def get_project(project_id, config): - endpoint = config["api_url"] + f"projects/{project_id}" - project = get_request(endpoint, headers=config["headers"]) - return project - - - def get_merge_requests_for_approval(config): - group = get_group_id("approval", config) - endpoint = config["api_url"] + f"/groups/{group}/merge_requests" - response = get_request( - endpoint, headers=config["headers"], params={"state": "opened"} - ) - return response - - - def count_unresolved_mr_discussions(mr, config): - if mr["user_notes_count"] == 0: - return 0 - project_id = mr["project_id"] - mr_iid = mr["iid"] - endpoint = ( - config["api_url"] + f"projects/{project_id}/merge_requests/{mr_iid}/discussions" - ) - discussions = get_request(endpoint, headers=config["headers"]) - if len(discussions) == 0: - return 0 - else: - n_unresolved = 0 - for d in discussions: - for n in d["notes"]: - if n["resolvable"] is True and n["resolved"] is False: - n_unresolved += 1 - return n_unresolved - - - def accept_merge_request(mr, config): - project_id = mr["project_id"] - mr_iid = mr["iid"] - endpoint = ( - config["api_url"] + f"projects/{project_id}/merge_requests/{mr_iid}/merge" - ) - return put_request(endpoint, headers=config["headers"]) - - - def check_merge_requests(): - logger.info(f"STARTING RUN") - - try: - config = 
get_gitlab_config(server="external") - except Exception as e: - logger.critical(f"Failed to load gitlab secrets: {e}") - return - - logger.info("Getting open merge requests for approval") - try: - merge_requests = get_merge_requests_for_approval(config) - except Exception as e: - logger.critical(f"Failed to get merge requests: {e}") - return - logger.info(f"Found {len(merge_requests)} open merge requests") - - for i, mr in enumerate(merge_requests): - logger.info("-" * 20) - logger.info(f"Merge request {i+1} out of {len(merge_requests)}") - try: - source_project = get_project(mr["source_project_id"], config) - logger.info(f"Source Project: {source_project['name_with_namespace']}") - logger.info(f"Source Branch: {mr['source_branch']}") - target_project = get_project(mr["project_id"], config) - logger.info(f"Target Project: {target_project['name_with_namespace']}") - logger.info(f"Target Branch: {mr['target_branch']}") - logger.info(f"Commit SHA: {mr['sha']}") - logger.info(f"Created At: {mr['created_at']}") - status = mr["merge_status"] - logger.info(f"Merge Status: {status}") - wip = mr["work_in_progress"] - logger.info(f"Work in Progress: {wip}") - unresolved = count_unresolved_mr_discussions(mr, config) - logger.info(f"Unresolved Discussions: {unresolved}") - upvotes = mr["upvotes"] - logger.info(f"Upvotes: {upvotes}") - downvotes = mr["downvotes"] - logger.info(f"Downvotes: {downvotes}") - except Exception as e: - logger.error(f"Failed to extract merge request details: {e}") - continue - if ( - status == "can_be_merged" - and wip is False - and unresolved == 0 - and upvotes >= 2 - and downvotes == 0 - ): - logger.info("Merge request has been approved. Proceeding with merge.") - try: - result = accept_merge_request(mr, config) - except Exception as e: - logger.error(f"Merge failed! {e}") - continue - if result["state"] == "merged": - logger.info(f"Merge successful! 
Merge SHA {result['merge_commit_sha']}") - try: - with open("accepted_merge_requests.log", "a") as f: - f.write( - f"{result['merged_at']}, {source_project['name_with_namespace']}, {mr['source_branch']}, {mr['sha']}, {target_project['name_with_namespace']}, {mr['target_branch']}, {result['merge_commit_sha']}\n" - ) - except Exception as e: - logger.error(f"Failed to log accepted merge request: {e}") - try: - internal_update_repo( - target_project["ssh_url_to_repo"], - target_project["name"] - ) - except Exception as e: - logger.error(f"Failed to push to internal: {e}") - else: - logger.error(f"Merge failed! Merge status is {result['state']}") - else: - logger.info("Merge request has not been approved. Skipping.") - logger.info(f"RUN FINISHED") - logger.info("=" * 30) - - - if __name__ == "__main__": - check_merge_requests() + runcmd: # -------------------------------- diff --git a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py new file mode 100644 index 0000000000..7e1a033d2e --- /dev/null +++ b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py @@ -0,0 +1,274 @@ +import requests +import subprocess +from urllib.parse import quote as url_quote +from pathlib import Path +import logging +from logging.handlers import RotatingFileHandler + +logger = logging.getLogger("merge_requests_logger") +logger.setLevel(logging.INFO) +formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s") +f_handler = RotatingFileHandler( + "check_merge_requests.log", maxBytes=5 * 1024 * 1024, backupCount=10 +) +f_handler.setFormatter(formatter) +c_handler = logging.StreamHandler() +c_handler.setFormatter(formatter) +logger.addHandler(f_handler) +logger.addHandler(c_handler) + +global HOME +global GL_INTERNAL_IP +global GL_INTERNAL_TOKEN +global GL_INTERNAL_AUTH_HEADER + +HOME = str(Path.home()) + +with 
open(f"{HOME}/.secrets/gitlab-internal-ip-address", "r") as f: + GL_INTERNAL_IP = f.readlines()[0].strip() + +GL_INTERNAL_URL = "http://" + GL_INTERNAL_IP + "/api/v4" + +with open(f"{HOME}/.secrets/gitlab-internal-api-token", "r") as f: + GL_INTERNAL_TOKEN = f.readlines()[0].strip() + +GL_INTERNAL_AUTH_HEADER = {"Authorization": "Bearer " + GL_INTERNAL_TOKEN} + + +def internal_project_exists(repo_name): + """Given a string (the name of a repo - not a URL), returns a pair + (exists, url): + - exists: boolean - does repo_name exist on GITLAB-INTERNAL? + - url: str - the ssh url to the repo (when 'exists' is true) + """ + + # build url-encoded repo_name + repo_path_encoded = url_quote("ingress/" + repo_name, safe='') + + # Does repo_name exist on GITLAB-INTERNAL? + response = requests.get(GL_INTERNAL_URL + '/projects/' + repo_path_encoded, + headers=GL_INTERNAL_AUTH_HEADER) + + if response.status_code == 404: + return (False, "") + elif response.status_code == 200: + return (True, response.json()["ssh_url_to_repo"]) + else: + # Not using `response.raise_for_status()`, since we also want + # to raise an exception on unexpected "successful" responses + # (not 200) + raise requests.HTTPError("Unexpected response: " + response.reason + + ", content: " + response.text) + + +def internal_update_repo(gh_url, repo_name): + """Takes a GitHub URL, `gh_url`, which should be the URL to the + "APPROVED" repo, clones it and pushes all branches to the repo + `repo_name` owned by 'ingress' on GITLAB-INTERNAL, creating it + there first if it doesn't exist. 
+ """ + # clone the repo from gh_url (on GITLAB-EXTERNAL), removing any of + # the same name first (simpler than checking if it exists, has the + # same remote and pulling) + subprocess.run(["rm", "-rf", repo_name], check=True) + subprocess.run(["git", "clone", gh_url, repo_name], check=True) + + project_exists, gl_internal_repo_url = internal_project_exists(repo_name) + + # create the project if it doesn't exist + if not project_exists: + print("Creating: " + repo_name) + response = requests.post(GL_INTERNAL_URL + '/projects', + headers=GL_INTERNAL_AUTH_HEADER, + data={"name": repo_name, + "visibility": "public"}) + response.raise_for_status() + assert(response.json()["path_with_namespace"] == "ingress/" + repo_name) + + gl_internal_repo_url = response.json()["ssh_url_to_repo"] + + # Set the remote + subprocess.run(["git", "remote", "add", "gitlab-internal", + gl_internal_repo_url], cwd=repo_name, check=True) + + # Force push current contents of all branches + subprocess.run(["git", "push", "--force", "--all", + "gitlab-internal"], cwd=repo_name, check=True) + + +def get_request(endpoint, headers, params=None): + r = requests.get(endpoint, headers=headers, params=params) + if r.ok: + return r.json() + else: + raise ValueError( + f"Request failed: URL {endpoint}, CODE {r.status_code}, CONTENT {r.content}" + ) + + +def put_request(endpoint, headers, params=None): + r = requests.put(endpoint, headers=headers, params=params) + if r.ok: + return r.json() + else: + raise ValueError( + f"Request failed: URL {endpoint}, CODE {r.status_code}, CONTENT {r.content}" + ) + + +def get_gitlab_config(server="external"): + home = str(Path.home()) + + if server == "external": + with open(f"{home}/.secrets/gitlab-external-ip-address", "r") as f: + ip = f.readlines()[0].strip() + with open(f"{home}/.secrets/gitlab-external-api-token", "r") as f: + token = f.readlines()[0].strip() + elif server == "internal": + with open(f"{home}/.secrets/gitlab-internal-ip-address", "r") as f: + ip = 
f.readlines()[0].strip() + with open(f"{home}/.secrets/gitlab-internal-api-token", "r") as f: + token = f.readlines()[0].strip() + else: + raise ValueError("Server must be external or internal") + + api_url = f"http://{ip}/api/v4/" + headers = {"Authorization": "Bearer " + token} + + return {"api_url": api_url, "api_token": token, "ip": ip, "headers": headers} + + +def get_group_id(group_name, config): + endpoint = config["api_url"] + "groups" + response = get_request(endpoint, headers=config["headers"]) + for group in response: + if group["name"] == group_name: + return group["id"] + raise ValueError(f"{group_name} not found in groups.") + + +def get_project(project_id, config): + endpoint = config["api_url"] + f"projects/{project_id}" + project = get_request(endpoint, headers=config["headers"]) + return project + + +def get_merge_requests_for_approval(config): + group = get_group_id("approval", config) + endpoint = config["api_url"] + f"/groups/{group}/merge_requests" + response = get_request( + endpoint, headers=config["headers"], params={"state": "opened"} + ) + return response + + +def count_unresolved_mr_discussions(mr, config): + if mr["user_notes_count"] == 0: + return 0 + project_id = mr["project_id"] + mr_iid = mr["iid"] + endpoint = ( + config["api_url"] + f"projects/{project_id}/merge_requests/{mr_iid}/discussions" + ) + discussions = get_request(endpoint, headers=config["headers"]) + if len(discussions) == 0: + return 0 + else: + n_unresolved = 0 + for d in discussions: + for n in d["notes"]: + if n["resolvable"] is True and n["resolved"] is False: + n_unresolved += 1 + return n_unresolved + + +def accept_merge_request(mr, config): + project_id = mr["project_id"] + mr_iid = mr["iid"] + endpoint = ( + config["api_url"] + f"projects/{project_id}/merge_requests/{mr_iid}/merge" + ) + return put_request(endpoint, headers=config["headers"]) + + +def check_merge_requests(): + logger.info(f"STARTING RUN") + + try: + config = 
get_gitlab_config(server="external") + except Exception as e: + logger.critical(f"Failed to load gitlab secrets: {e}") + return + + logger.info("Getting open merge requests for approval") + try: + merge_requests = get_merge_requests_for_approval(config) + except Exception as e: + logger.critical(f"Failed to get merge requests: {e}") + return + logger.info(f"Found {len(merge_requests)} open merge requests") + + for i, mr in enumerate(merge_requests): + logger.info("-" * 20) + logger.info(f"Merge request {i+1} out of {len(merge_requests)}") + try: + source_project = get_project(mr["source_project_id"], config) + logger.info(f"Source Project: {source_project['name_with_namespace']}") + logger.info(f"Source Branch: {mr['source_branch']}") + target_project = get_project(mr["project_id"], config) + logger.info(f"Target Project: {target_project['name_with_namespace']}") + logger.info(f"Target Branch: {mr['target_branch']}") + logger.info(f"Commit SHA: {mr['sha']}") + logger.info(f"Created At: {mr['created_at']}") + status = mr["merge_status"] + logger.info(f"Merge Status: {status}") + wip = mr["work_in_progress"] + logger.info(f"Work in Progress: {wip}") + unresolved = count_unresolved_mr_discussions(mr, config) + logger.info(f"Unresolved Discussions: {unresolved}") + upvotes = mr["upvotes"] + logger.info(f"Upvotes: {upvotes}") + downvotes = mr["downvotes"] + logger.info(f"Downvotes: {downvotes}") + except Exception as e: + logger.error(f"Failed to extract merge request details: {e}") + continue + if ( + status == "can_be_merged" + and wip is False + and unresolved == 0 + and upvotes >= 2 + and downvotes == 0 + ): + logger.info("Merge request has been approved. Proceeding with merge.") + try: + result = accept_merge_request(mr, config) + except Exception as e: + logger.error(f"Merge failed! {e}") + continue + if result["state"] == "merged": + logger.info(f"Merge successful! 
Merge SHA {result['merge_commit_sha']}") + try: + with open("accepted_merge_requests.log", "a") as f: + f.write( + f"{result['merged_at']}, {source_project['name_with_namespace']}, {mr['source_branch']}, {mr['sha']}, {target_project['name_with_namespace']}, {mr['target_branch']}, {result['merge_commit_sha']}\n" + ) + except Exception as e: + logger.error(f"Failed to log accepted merge request: {e}") + try: + internal_update_repo( + target_project["ssh_url_to_repo"], + target_project["name"] + ) + except Exception as e: + logger.error(f"Failed to push to internal: {e}") + else: + logger.error(f"Merge failed! Merge status is {result['state']}") + else: + logger.info("Merge request has not been approved. Skipping.") + logger.info(f"RUN FINISHED") + logger.info("=" * 30) + + +if __name__ == "__main__": + check_merge_requests() diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py new file mode 100644 index 0000000000..2768e11595 --- /dev/null +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -0,0 +1,663 @@ +#!/usr/bin/env python3 + +""" +Start from zipfile of a particular commit - should have filename +of the form __.zip + +We want to turn this into a merge request on a Gitlab project. + +1) get useful gitlab stuff (url, api key, namespace_ids for our groups) +2) unzip zipfiles in specified directory +3) loop over unzipped repos. For each one: + a) see if "unapproved" project with same name exists, if not, create it + b) commit and push to "unapproved" project, branch=commit_hash + c) see "approval" project with same name exists, if not, create it + d) create branch=desired_branch_name on "approval" project + e) create merge request from unapproved/repo_name/commit_hash to + approval/repo_name/desired_branch_name +4) clean up - remove zipfiles and unpacked repos. 
+""" + + +import os +import shutil +import re +import requests +import subprocess +from zipfile import ZipFile, BadZipFile +from urllib.parse import quote as url_quote +from pathlib import Path +import logging +from logging.handlers import RotatingFileHandler + +logger = logging.getLogger("project_upload_logger") +logger.setLevel(logging.INFO) +formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s") +f_handler = RotatingFileHandler( +"upload_zipfiles_to_projects.log", maxBytes=5 * 1024 * 1024, backupCount=10 +) +f_handler.setFormatter(formatter) +c_handler = logging.StreamHandler() +c_handler.setFormatter(formatter) +logger.addHandler(f_handler) +logger.addHandler(c_handler) + + +def unzip_zipfiles(zipfile_dir, tmp_repo_dir): + """ + Parameters + ========== + zipfile_dir: str, path to directory containing zipfiles + tmp_repo_dir: str, path to directory where zipfiles will be unzipped + + Returns + ======= + output_list: list of tuples + [(repo_name, commit_hash, desired_branch, unzipped-path),...] + + Note that the convention for the zipfile filenames is + __.zip + """ + output_list = [] + repo_commit_regex = re.compile("([-\w]+)_([a-f\d]+)_([\S]+).zip") + # tear down and recreate the directory where we will put the unpacked zip + shutil.rmtree(tmp_repo_dir, ignore_errors=True) + os.makedirs(tmp_repo_dir) + # look in a directory for zipfiles + zipfiles = os.listdir(zipfile_dir) + for zipfile in zipfiles: + filename_match = repo_commit_regex.search(zipfile) + if not filename_match: + print("Badly named zipfile! 
{}".format(zipfile)) + continue + repo_name, commit_hash, branch = filename_match.groups() + + # unzip + try: + zipfile_path = os.path.join(zipfile_dir, zipfile) + with ZipFile(zipfile_path, 'r') as zip_obj: + zip_obj.extractall(path=tmp_repo_dir) + # we should have made a new directory - find its name + unpacked_zips = os.listdir(tmp_repo_dir) + # should be one and only one directory in here + if len(unpacked_zips) != 1: + raise RuntimeError("Unexpected number of items in unpacked zip directory {}: {}".format(tmp_repo_dir, unpacked_zips)) + unpacked_location = os.path.join(tmp_repo_dir, unpacked_zips[0]) + output_list.append((repo_name, commit_hash, branch, unpacked_location)) + except(BadZipFile): + print("Bad zipfile: {}".format(zipfile)) + continue + return output_list + + +def get_gitlab_config(): + """ + Return a dictionary containing the base URL for the gitlab API, + the API token, the IP address, and the headers to go in any request + """ + home = str(Path.home()) + + with open(f"{home}/.secrets/gitlab-external-ip-address", "r") as f: + ip = f.readlines()[0].strip() + with open(f"{home}/.secrets/gitlab-external-api-token", "r") as f: + token = f.readlines()[0].strip() + + api_url = f"http://{ip}/api/v4/" + headers = {"Authorization": "Bearer " + token} + + return {"api_url": api_url, + "api_token": token, + "ip": ip, + "headers": headers} + + +def get_group_namespace_ids(gitlab_url, gitlab_token, + groups=["approval","unapproved"]): + """ + Find the namespace_id corresponding to the groups we're interested in, + e.g. 'approval' and 'unapproved'. + + Parameters + ========== + gitlab_url: str, base URL for the API + gitlab_token: str, API token for Gitlab + groups: list of string, the group names to look for. 
+ + Returns + ======= + namespace_id_dict: dict, format {: } + + """ + namespaces_url = "{}/namespaces/".format(gitlab_url) + response = requests.get(namespaces_url, + headers = {"Authorization": "Bearer "+gitlab_token}) + if response.status_code != 200: + raise RuntimeError("Bad request: {} {}"\ + .format(response.status_code, response.content)) + gitlab_namespaces = response.json() + namespace_id_dict = {} + for namespace in gitlab_namespaces: + if namespace["kind"] == "group" and namespace["name"] in groups: + namespace_id_dict[namespace["name"]] = namespace["id"] + return namespace_id_dict + + +def get_gitlab_project_list(gitlab_url, gitlab_token): + """ + Get the list of Projects. + + Parameters + ========== + namespace_id: int, ID of the group ("unapproved" or "approval") + gitlab_url: str, base URL for the API + gitlab_token: str, API token. + + Returns + ======= + gitlab_projects: list of dictionaries. + """ + + # list currently existing projects on Gitlab + projects_url = "{}/projects/".format(gitlab_url) + response = requests.get(projects_url, + headers = {"Authorization": "Bearer "+gitlab_token}, + params = {"owned": True, "simple": True}) + + if response.status_code != 200: + raise RuntimeError("Bad request: {} {}"\ + .format(response.status_code, response.content)) + gitlab_projects = response.json() + return gitlab_projects + + +def check_if_project_exists(repo_name, namespace_id, gitlab_url, gitlab_token): + """ + Get a list of projects from the API - check if namespace_id (i.e. group) + and name match. + + Parameters + ========== + repo_name: str, name of our repository/project + namespace_id: int, id of our group ("unapproved" or "approval") + gitlab_url: str, base URL of Gitlab API + gitlab_token: str, API key for Gitlab API. + + Returns + ======= + bool, True if project exists, False otherwise. 
+ """ + projects = get_gitlab_project_list(gitlab_url, gitlab_token) + for project in projects: + if project["name"] == repo_name and \ + project["namespace"]["id"] == namespace_id: + return True + return False + + +def get_project_info(repo_name, namespace_id, gitlab_url, gitlab_token): + """ + Check if project exists, and if so get its ID. Otherwise, create + it and return the ID. + + Parameters + ========== + repo_name: str, name of our repository/project + namespace_id: int, id of our group ("unapproved" or "approval") + gitlab_url: str, base URL of Gitlab API + gitlab_token: str, API key for Gitlab API. + + Returns + ======= + project_info: dict, containing info from the projects API endpoint + """ + already_exists = check_if_project_exists(repo_name, + namespace_id, + gitlab_url, + gitlab_token) + if already_exists: + projects = get_gitlab_project_list(gitlab_url, gitlab_token) + for project_info in projects: + if project_info["name"] == repo_name and \ + project_info["namespace"]["id"] == namespace_id: + return project_info + else: + project_info = create_project(repo_name, + namespace_id, + gitlab_url, + gitlab_token) + return project_info + + +def get_project_remote_url(repo_name, namespace_id, + gitlab_url, gitlab_token): + """ + Given the name of a repository and namespace_id (i.e. group, + "unapproved" or "approval"), either return the remote URL for project + matching the repo name, or create it if it doesn't exist already, + and again return the remote URL. + + Parameters + ========== + repo_name: str, name of the repository/project we're looking for. + namespace_id: int, the ID of the group ("unapproved" or "approval") + gitlab_url: str, base URL of the API + gitlab_token: str, API key + + Returns + ======= + gitlab_project_url: str, the URL to be set as the "remote". 
+ """ + project_info = get_project_info(repo_name, namespace_id, + gitlab_url, gitlab_token) + + return project_info["ssh_url_to_repo"] + + +def get_project_id(repo_name, namespace_id, + gitlab_url, gitlab_token): + """ + Given the name of a repository and namespace_id (i.e. group, + "unapproved" or "approval"), either return the id of project + matching the repo name, or create it if it doesn't exist already, + and again return the id. + + Parameters + ========== + repo_name: str, name of the repository/project we're looking for. + namespace_id: int, the ID of the group ("unapproved" or "approval") + gitlab_url: str, base URL of the API + gitlab_token: str, API key + + Returns + ======= + gitlab_project_url: str, the URL to be set as the "remote". + """ + project_info = get_project_info(repo_name, namespace_id, + gitlab_url, gitlab_token) + + return project_info["id"] + + +def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): + """ + Create empty project on gitlab, and return the corresponding remote URL. + + Parameters + ========== + repo_name: str, name of the repository/project + namespace_id: int, ID of the group ("unapproved" or "approved") + gitlab_url: str, base URL of the API + gitlab_token: str, API token. + + Returns + ======= + gitlab_project_info: dict, containing among other things, the name and + the remote URL for the project. 
+ """ + projects_url = "{}projects/".format(gitlab_url) + response = requests.post(projects_url, + headers = {"Authorization": "Bearer "+gitlab_token}, + data = {"name": repo_name, + "visibility": "public", + "namespace_id": namespace_id} + ) + assert(response.json()["name"] == repo_name) + project_info = response.json() + print("Created project {} in namespace {}, project_id {}".\ + format(repo_name, namespace_id, project_info["id"])) + return project_info + + +def check_if_branch_exists(branch_name, + project_id, + gitlab_url, + gitlab_token): + """ + See if a branch with name branch_name already exists on this Project + + Parameters + ========== + branch_name: str, name of branch to look for + project_id: int, id of the project, obtained from projects API endpoint + gitlab_url: base URL of the Gitlab API + gitlab_token: API token for the Gitlab API + + Returns + ======= + branch_exists: bool, True if branch exists, False if not. + """ + branches_url = "{}/projects/{}/repository/branches".\ + format(gitlab_url, project_id) + response = requests.get(branches_url, + headers={"Authorization": "Bearer "+gitlab_token}) + if response.status_code != 200: + raise RuntimeError("Unable to check for branch {} on project {}: {}".\ + format(branch_name, project_id, r.content)) + branches = response.json() + for branch_info in branches: + if branch_info["name"] == branch_name: + return True + return False + + + +def create_branch(branch_name, + project_id, + gitlab_url, + gitlab_token, + reference_branch="master"): + """ + Create a new branch on an existing project. By default, use 'master' + as the reference branch from which to create the new one. + + Parameters + ========== + branch_name: str, the desired name of the new branch + project_id: int, the ID of the project, which is the "id" value in + the dictionary of project information returned when + creating a new project or listing existing ones. 
+ gitlab_url: str, the base URL for the Gitlab API + gitlab_token: str, the Gitlab API token + + Returns + ======= + branch_info: dict, info about the branch from API endpoint + """ + # assume branch doesn't already exist - create it! + branch_url = "{}/projects/{}/repository/branches".format(gitlab_url, project_id) + response = requests.post(branch_url, + headers = {"Authorization": "Bearer "+gitlab_token}, + data = {"branch": branch_name, "ref": reference_branch}) + if response.status_code != 201: + raise RuntimeError("Problem creating branch {}: {}".format(branch_name, + response.content)) + branch_info = response.json() + assert branch_info["name"] == branch_name + return branch_info + + +def check_if_merge_request_exists(repo_name, + source_project_id, + source_branch, + target_project_id, + target_branch, + gitlab_url, gitlab_token): + """ + See if there is an existing merge request between the source and target + project/branch combinations. + + Parameters + ========== + repo_name: str, name of the repository + source_project_id: int, project_id for the unapproved project, obtainable + as the "ID" field of the json returned from the + projects API endpoint. + source_branch: str, name of the branch on source project, will typically + be the commit_hash from the original repo. + target_project_id: int, project_id for the "approval" group's project. + target_branch: str, name of branch on target project, will typically + be the desired branch name. + gitlab_url: str, base URL for the Gitlab API + gitlab_token: str, API token for the Gitlab API. 
+ + Returns + ======= + bool, True if merge request already exists, False otherwise + """ + mr_url = "{}/projects/{}/merge_requests".format(gitlab_url, source_project_id) + response = requests.get(mr_url, + headers = {"Authorization": "Bearer "+gitlab_token}) + if response.status_code != 200: + raise RuntimeError("Request to check existence of MR failed: {} {}".\ + format(response.status_code, response.content)) + for mr in response.json(): + if mr["source_branch"] == source_branch and \ + mr["target_branch"] == target_branch: + print("Merge request {} -> {} already exists".\ + format(source_branch, target_branch)) + return True + return False + + +def create_merge_request(repo_name, + source_project_id, + source_branch, + target_project_id, + target_branch, + gitlab_url, gitlab_token): + + """ + Create a new MR, e.g. from the branch in the "unapproved" + group's project, to the branch in the "approval" + group's project. + + Parameters + ========== + repo_name: str, name of the repository + source_project_id: int, project_id for the unapproved project, obtainable + as the "ID" field of the json returned from the + projects API endpoint. + source_branch: str, name of the branch on source project, will typically + be the commit_hash from the original repo. + target_project_id: int, project_id for the "approval" group's project. + target_branch: str, name of branch on target project, will typically + be the desired branch name. + gitlab_url: str, base URL for the Gitlab API + gitlab_token: str, API token for the Gitlab API. 
+ + Returns + ======= + mr_info: dict, the response from the API upon creating the Merge Request + """ + # first need to create a forked-from relationship between the projects + fork_url = "{}/projects/{}/fork/{}".format(gitlab_url, + source_project_id, + target_project_id) + response = requests.post(fork_url, + headers = {"Authorization": "Bearer "+gitlab_token}) + # status code 201 if fork relationship created, or 409 if already there + if (response.status_code != 201) and (response.status_code != 409): + raise RuntimeError("Unable to create fork relationship: {} {}".\ + format(response.status_code, response.content)) + + mr_url = "{}/projects/{}/merge_requests".format(gitlab_url, source_project_id) + title = "{}: {} to {}".format(repo_name, source_branch, target_branch) + response = requests.post(mr_url, + headers = {"Authorization": "Bearer "+gitlab_token}, + data = {"source_branch": source_branch, + "target_branch": target_branch, + "target_project_id": target_project_id, + "title": title}) + if response.status_code != 201: + raise RuntimeError("Problem creating Merge Request {} {} {}: {}"\ + .format(repo_name, source_branch,target_branch, + response.content)) + mr_info = response.json() + return mr_info + + + + + +def push_to_remote(path_to_unzipped_repo, commit_hash, remote_url): + """ + Run shell commands to convert the unzipped directory containing the + repository contents into a git repo, then commit it to a branch named + as the commit_hash. + + Parameters + ========== + path_to_unzipped_repo: str, the full directory path to the unzipped repo + commit_hash: str, original commit hash from the external git repo, will + be used as the name of the branch to push to + remote_url: str, the URL for this project on gitlab-external to be added + as a "remote". 
+ """ + subprocess.run(["git","init"], cwd=path_to_unzipped_repo, check=True) + # Create a branch named after the original commit hash + subprocess.run(["git","checkout","-b",commit_hash], + cwd=path_to_unzipped_repo, check=True) + # Commit everything to this branch, also putting commit hash into message + subprocess.run(["git","add","."], cwd=path_to_unzipped_repo, check=True) + subprocess.run(["git","commit","-m",commit_hash], + cwd=path_to_unzipped_repo, check=True) + # add the remote_url as a remote called 'gitlab-external' + subprocess.run(["git","remote","add","gitlab-external",remote_url], + cwd=path_to_unzipped_repo, check=True) + # Push to gitlab external + subprocess.run(["git","push","--force","--all","gitlab-external"], + cwd=path_to_unzipped_repo, check=True) + + +def create_and_push_unapproved_project(repo_name, + namespace_id, + gitlab_url, + gitlab_token, + path_to_unzipped_repo, + commit_hash): + """ + We have unzipped a zipfile, and put the contents (i.e. the code we want + to push) in path_to_unzipped_project. + Now we create the project in the "unapproved" group on Gitlab, and push + to it. 
+ + Parameters + ========== + repo_name: str, name of our repository/project + gitlab_url: str, the base URL of Gitlab API + gitlab_token: str, API token for Gitlab API + path_to_unzipped_repo: str, full directory path to code we want to commit + commit_hash: str, the commit hash from the original repo, to be used as + the name of the branch we'll push to + + Returns + ======= + project_id: int, ID of the project as returned by projects API endpoint + """ + # Get project ID - project will be created if it didn't already exist + project_id = get_project_id(repo_name, namespace_id, gitlab_url, gitlab_token) + assert project_id + # see if branch already exists with name=commit_hash + branch_exists = check_if_branch_exists(commit_hash, + project_id, + gitlab_url, + gitlab_token) + if branch_exists: + print("Branch {} already exists".format(commit_hash)) + # already exists - do nothing + return project_id + # otherwise we need to commit code to it and push + remote_url = get_project_remote_url(repo_name, namespace_id, + gitlab_url, gitlab_token) + print("remote URL for {} is {}".format(repo_name, remote_url)) + + push_to_remote(path_to_unzipped_repo, commit_hash, remote_url) + # Return the project_id, to use in merge request + return project_id + + +def create_approved_project_branch(repo_name, + branch_name, + namespace_id, + gitlab_url, + gitlab_token): + """ + Create a new branch (and a new project if it doesn't already exist) + owned by the "approval" group. This will be the target for the merge + request. + + Parameters + ========== + repo_name: str, repository name + gitlab_url: str, base URL for Gitlab API + gitlab_token: str, API token for Gitlab API + branch_name: str, the desired branch name. 
+ + Returns + ======= + project_id: int, the "ID" field in the info from projects API endpoint + """ + # get the project ID - project will be created if it doesn't already exist + project_id = get_project_id(repo_name, namespace_id, gitlab_url, gitlab_token) + assert project_id + + # create the branch if it doesn't already exist + branch_exists = check_if_branch_exists(branch_name, + project_id, + gitlab_url, + gitlab_token) + if not branch_exists: + branch_info = create_branch(branch_name, + project_id, + gitlab_url, + gitlab_token) + assert branch_info["name"] == branch_name + # return the ID of this project so we can use it in merge request + return project_id + + +def main(): + # create a directory to unpack the zipfiles into + TMP_REPO_DIR = "/tmp/repos" + os.makedirs(TMP_REPO_DIR, exist_ok=True) + # get the gitlab config + config = get_gitlab_config() + ZIPFILE_DIR = "/zfiles" + + # unzip the zipfiles, and retrieve a list of tuples describing + # (repo_name, commit_hash, desired_branch, unzipped_location) + unzipped_repos = unzip_zipfiles(ZIPFILE_DIR, TMP_REPO_DIR) + + # get the namespace_ids of our "approval" and "unapproved" groups + GROUPS = ["unapproved","approval"] + namespace_ids = get_group_namespace_ids(config["api_url"], + config["api_token"], + GROUPS) + + # loop over all our newly unzipped repositories + for repo in unzipped_repos: + # unpack tuple + repo_name, commit_hash, branch_name, location = repo + print("Unpacked {} {} {}".format(repo_name, commit_hash, branch_name)) + src_project_id = create_and_push_unapproved_project(repo_name, + namespace_ids[GROUPS[0]], + config["api_url"], + config["api_token"], + location, + commit_hash) + print("Created project {}/{} branch {}".\ + format(GROUPS[0],repo_name, commit_hash)) + + # create project and branch on approved repo + target_project_id = create_approved_project_branch(repo_name, + branch_name, + namespace_ids[GROUPS[1]], + config["api_url"], + config["api_token"]) + print("Created project {}/{} 
branch {}".\ + format(GROUPS[1],repo_name, branch_name)) + + mr_exists = check_if_merge_request_exists(repo_name, + src_project_id, + commit_hash, + target_project_id, + branch_name, + config["api_url"], + config["api_token"]) + if mr_exists: + print("Merge request {} -> {} already exists. skipping".\ + format(commit_hash, branch_name)) + else: + # create merge request + create_merge_request(repo_name, + src_project_id, + commit_hash, + target_project_id, + branch_name, + config["api_url"], + config["api_token"]) + print("Created merge request {} -> {}".\ + format(commit_hash, branch_name)) + +if __name__ == "__main__": + main() From 46b778d597d95c641a18b385d16e63ea26f9bda2 Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Thu, 7 May 2020 12:02:43 +0100 Subject: [PATCH 031/155] substitute scripts into cloud init --- .../setup/Setup_SRE_WebApp_Servers.ps1 | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 8e4370f4a6..ce3937fe5f 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -87,7 +87,7 @@ $gitlabCloudInit = $gitlabCloudInitTemplate.Replace('', $shmDcFq Replace('',$gitlabInternalUsername). Replace('',$gitlabInternalPassword). Replace('',$gitlabInternalAPIToken) - + # Encode as base64 $gitlabCloudInitEncoded = [System.Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($gitlabCloudInit)) @@ -196,6 +196,19 @@ $gitlabLdapUserDn = "CN=" + $config.sre.users.ldap.gitlab.name + "," + $config.s $gitlabUserFilter = "(&(objectClass=user)(memberOf=CN=" + $config.sre.domain.securityGroups.reviewUsers.name + "," + $config.shm.domain.securityOuPath + "))" $gitlabExternalCloudInitTemplate = Join-Path $PSScriptRoot ".." 
"cloud_init" "cloud-init-gitlab-external.template.yaml" | Get-Item | Get-Content -Raw + +# Insert scripts into the cloud-init template +# ------------------------------------------- +$indent = " " +foreach ($scriptName in @("zipfile_to_gitlab_project.py", + "check_merge_requests.py")) { + + $raw_script = Get-Content (Join-Path $PSScriptRoot ".." "cloud_init" "scripts" $scriptName) -Raw + $indented_script = $raw_script -split "`n" | ForEach-Object { "${indent}$_" } | Join-String -Separator "`n" + $gitlabExternalCloudInitTemplate = $gitlabExternalCloudInitTemplate.Replace("${indent}<$scriptName>", $indented_script) +} + + $gitlabExternalCloudInit = $gitlabExternalCloudInitTemplate.Replace('',$sreAdminUsername). Replace('',$config.sre.webapps.gitlab.internal.ip). Replace('',$config.shm.domain.fqdn). From 56cd9ca66703902742be1dcdec491a3a3f2bdbba Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Thu, 7 May 2020 12:03:21 +0100 Subject: [PATCH 032/155] temporarily allow 500 status code when creating merge request --- .../scripts/zipfile_to_gitlab_project.py | 48 ++++++++++--------- 1 file changed, 25 insertions(+), 23 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 2768e11595..449b20486c 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -79,9 +79,6 @@ def unzip_zipfiles(zipfile_dir, tmp_repo_dir): zip_obj.extractall(path=tmp_repo_dir) # we should have made a new directory - find its name unpacked_zips = os.listdir(tmp_repo_dir) - # should be one and only one directory in here - if len(unpacked_zips) != 1: - raise RuntimeError("Unexpected number of items in unpacked zip directory {}: {}".format(tmp_repo_dir, unpacked_zips)) unpacked_location = os.path.join(tmp_repo_dir, 
unpacked_zips[0]) output_list.append((repo_name, commit_hash, branch, unpacked_location)) except(BadZipFile): @@ -404,7 +401,7 @@ def check_if_merge_request_exists(repo_name, ======= bool, True if merge request already exists, False otherwise """ - mr_url = "{}/projects/{}/merge_requests".format(gitlab_url, source_project_id) + mr_url = "{}/projects/{}/merge_requests".format(gitlab_url, target_project_id) response = requests.get(mr_url, headers = {"Authorization": "Bearer "+gitlab_token}) if response.status_code != 200: @@ -438,7 +435,7 @@ def create_merge_request(repo_name, as the "ID" field of the json returned from the projects API endpoint. source_branch: str, name of the branch on source project, will typically - be the commit_hash from the original repo. + be the 'branch-'. target_project_id: int, project_id for the "approval" group's project. target_branch: str, name of branch on target project, will typically be the desired branch name. @@ -469,17 +466,20 @@ def create_merge_request(repo_name, "target_project_id": target_project_id, "title": title}) if response.status_code != 201: - raise RuntimeError("Problem creating Merge Request {} {} {}: {}"\ - .format(repo_name, source_branch,target_branch, - response.content)) +# raise RuntimeError("Problem creating Merge Request {} {} {}: {}"\ +# .format(repo_name, source_branch,target_branch, +# response.content)) +##### TEMPORARY - don't raise an error here - we get 500 status code +##### even though MR is created it - under investigation. 
+ print("Problem creating Merge Request {} {} {}: {}"\ + .format(repo_name, source_branch,target_branch, + response.content)) + return {} mr_info = response.json() return mr_info - - - -def push_to_remote(path_to_unzipped_repo, commit_hash, remote_url): +def push_to_remote(path_to_unzipped_repo, branch_name, remote_url): """ Run shell commands to convert the unzipped directory containing the repository contents into a git repo, then commit it to a branch named @@ -488,18 +488,19 @@ def push_to_remote(path_to_unzipped_repo, commit_hash, remote_url): Parameters ========== path_to_unzipped_repo: str, the full directory path to the unzipped repo - commit_hash: str, original commit hash from the external git repo, will + branch_name: str, original commit hash from the external git repo, will be used as the name of the branch to push to remote_url: str, the URL for this project on gitlab-external to be added as a "remote". """ subprocess.run(["git","init"], cwd=path_to_unzipped_repo, check=True) # Create a branch named after the original commit hash - subprocess.run(["git","checkout","-b",commit_hash], + subprocess.run(["git","checkout","-b",branch_name], cwd=path_to_unzipped_repo, check=True) # Commit everything to this branch, also putting commit hash into message subprocess.run(["git","add","."], cwd=path_to_unzipped_repo, check=True) - subprocess.run(["git","commit","-m",commit_hash], + commit_msg = "Committing to branch {}".format(branch_name) + subprocess.run(["git","commit","-m", commit_msg], cwd=path_to_unzipped_repo, check=True) # add the remote_url as a remote called 'gitlab-external' subprocess.run(["git","remote","add","gitlab-external",remote_url], @@ -514,7 +515,7 @@ def create_and_push_unapproved_project(repo_name, gitlab_url, gitlab_token, path_to_unzipped_repo, - commit_hash): + branch_name): """ We have unzipped a zipfile, and put the contents (i.e. the code we want to push) in path_to_unzipped_project. 
@@ -527,7 +528,7 @@ def create_and_push_unapproved_project(repo_name, gitlab_url: str, the base URL of Gitlab API gitlab_token: str, API token for Gitlab API path_to_unzipped_repo: str, full directory path to code we want to commit - commit_hash: str, the commit hash from the original repo, to be used as + branch_name: str, the commit hash from the original repo, to be used as the name of the branch we'll push to Returns @@ -538,12 +539,12 @@ def create_and_push_unapproved_project(repo_name, project_id = get_project_id(repo_name, namespace_id, gitlab_url, gitlab_token) assert project_id # see if branch already exists with name=commit_hash - branch_exists = check_if_branch_exists(commit_hash, + branch_exists = check_if_branch_exists(branch_name, project_id, gitlab_url, gitlab_token) if branch_exists: - print("Branch {} already exists".format(commit_hash)) + print("Branch {} already exists".format(branch_name)) # already exists - do nothing return project_id # otherwise we need to commit code to it and push @@ -551,7 +552,7 @@ def create_and_push_unapproved_project(repo_name, gitlab_url, gitlab_token) print("remote URL for {} is {}".format(repo_name, remote_url)) - push_to_remote(path_to_unzipped_repo, commit_hash, remote_url) + push_to_remote(path_to_unzipped_repo, branch_name, remote_url) # Return the project_id, to use in merge request return project_id @@ -619,14 +620,15 @@ def main(): # unpack tuple repo_name, commit_hash, branch_name, location = repo print("Unpacked {} {} {}".format(repo_name, commit_hash, branch_name)) + unapproved_branch_name = "branch-{}".format(commit_hash) src_project_id = create_and_push_unapproved_project(repo_name, namespace_ids[GROUPS[0]], config["api_url"], config["api_token"], location, - commit_hash) + unapproved_branch_name) print("Created project {}/{} branch {}".\ - format(GROUPS[0],repo_name, commit_hash)) + format(GROUPS[0],repo_name, unapproved_branch_name)) # create project and branch on approved repo target_project_id = 
create_approved_project_branch(repo_name, @@ -651,7 +653,7 @@ def main(): # create merge request create_merge_request(repo_name, src_project_id, - commit_hash, + unapproved_branch_name, target_project_id, branch_name, config["api_url"], From bbb965b43dc0ef461c10019fcccce06ec345d074 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Mon, 11 May 2020 12:55:54 +0100 Subject: [PATCH 033/155] check_merge_requests docstrings, comments and formatting --- .../scripts/check_merge_requests.py | 307 ++++++++++++++---- 1 file changed, 252 insertions(+), 55 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py index 7e1a033d2e..5971352464 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py +++ b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py @@ -1,3 +1,25 @@ +#!/usr/bin/env python3 + +""" +Check merge requests on gitlab external, approve them where appropriate, +and push the approved repos to gitlab internal. + +1) Get open merge requests in the approval group on gitlab external. +2) Check whether any of them meet the approval conditions. By default: status +is can be merged, not flagged as work in progress, no unresolved discussions, +at least two upvotes, and no downvotes. +3) Accept approved merge requests (merged unapproved repo into approval repo). +4) Push whole approval repo to gitlab internal, creating the repo if it doesn't +already exist. + +This script creates two log files in the same directory that it is run from: +* check_merge_requests.log : A verbose log of the steps performed in each run +and any errors encountered. +* accepted_merge_requests.log : A list of merge requests that have been accepted +in CSV format with columns merged time, source project, source branch, source +commit, target project, target branch and target commit. 
+""" + import requests import subprocess from urllib.parse import quote as url_quote @@ -5,6 +27,8 @@ import logging from logging.handlers import RotatingFileHandler +# Setup logging to console and file. File uses RotatingFileHandler to create +# logs over a rolling window, 10 files each max 5 MB in size. logger = logging.getLogger("merge_requests_logger") logger.setLevel(logging.INFO) formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s") @@ -17,37 +41,39 @@ logger.addHandler(f_handler) logger.addHandler(c_handler) -global HOME -global GL_INTERNAL_IP -global GL_INTERNAL_TOKEN -global GL_INTERNAL_AUTH_HEADER - -HOME = str(Path.home()) - -with open(f"{HOME}/.secrets/gitlab-internal-ip-address", "r") as f: - GL_INTERNAL_IP = f.readlines()[0].strip() - -GL_INTERNAL_URL = "http://" + GL_INTERNAL_IP + "/api/v4" - -with open(f"{HOME}/.secrets/gitlab-internal-api-token", "r") as f: - GL_INTERNAL_TOKEN = f.readlines()[0].strip() - -GL_INTERNAL_AUTH_HEADER = {"Authorization": "Bearer " + GL_INTERNAL_TOKEN} - -def internal_project_exists(repo_name): - """Given a string (the name of a repo - not a URL), returns a pair - (exists, url): - - exists: boolean - does repo_name exist on GITLAB-INTERNAL? - - url: str - the ssh url to the repo (when 'exists' is true) +def internal_project_exists(repo_name, config): + """Determine whether a repo exist in the ingress namespace on + GITLAB-INTERNAL. + + Parameters + ---------- + repo_name : str + The name of a repo (not a URL) to search for in the ingress namespace + on GITLAB-INTERNAL. 
+ config : dict + GITLAB-INTERNAL details and secrets as returned by get_gitlab_config + + Returns + ------- + tuple + (exists, url) tuple where exists: boolean - does repo_name exist on + GITLAB-INTERNAL?, and url: str - the ssh url to the repo (when 'exists' + is true) + + Raises + ------ + requests.HTTPError + If API request returns an unexpected code (not 404 or 200) """ # build url-encoded repo_name - repo_path_encoded = url_quote("ingress/" + repo_name, safe='') + repo_path_encoded = url_quote("ingress/" + repo_name, safe="") # Does repo_name exist on GITLAB-INTERNAL? - response = requests.get(GL_INTERNAL_URL + '/projects/' + repo_path_encoded, - headers=GL_INTERNAL_AUTH_HEADER) + response = requests.get( + config["api_url"] + "projects/" + repo_path_encoded, headers=config["headers"] + ) if response.status_code == 404: return (False, "") @@ -57,73 +83,152 @@ def internal_project_exists(repo_name): # Not using `response.raise_for_status()`, since we also want # to raise an exception on unexpected "successful" responses # (not 200) - raise requests.HTTPError("Unexpected response: " + response.reason - + ", content: " + response.text) + raise requests.HTTPError( + "Unexpected response: " + response.reason + ", content: " + response.text + ) -def internal_update_repo(gh_url, repo_name): - """Takes a GitHub URL, `gh_url`, which should be the URL to the - "APPROVED" repo, clones it and pushes all branches to the repo - `repo_name` owned by 'ingress' on GITLAB-INTERNAL, creating it +def internal_update_repo(git_url, repo_name, config): + """Takes a git URL, `git_url`, which should be the URL to the + "APPROVED" repo on GITLAB-EXTERNAL, clones it and pushes all branches to + the repo `repo_name` owned by 'ingress' on GITLAB-INTERNAL, creating it there first if it doesn't exist. + + Parameters + ---------- + git_url : str + URL to the "APPROVED" repo on GITLAB-EXTERNAL + repo_name : str + Name of repo to create on GITLAB-INTERNAL. 
+ config : dict + GITLAB-INTERNAL details and secrets as returned by get_gitlab_config """ - # clone the repo from gh_url (on GITLAB-EXTERNAL), removing any of + + # clone the repo from git_url (on GITLAB-EXTERNAL), removing any of # the same name first (simpler than checking if it exists, has the # same remote and pulling) subprocess.run(["rm", "-rf", repo_name], check=True) - subprocess.run(["git", "clone", gh_url, repo_name], check=True) + subprocess.run(["git", "clone", git_url, repo_name], check=True) - project_exists, gl_internal_repo_url = internal_project_exists(repo_name) + project_exists, gl_internal_repo_url = internal_project_exists(repo_name, config) # create the project if it doesn't exist if not project_exists: print("Creating: " + repo_name) - response = requests.post(GL_INTERNAL_URL + '/projects', - headers=GL_INTERNAL_AUTH_HEADER, - data={"name": repo_name, - "visibility": "public"}) + response = requests.post( + config["api_url"] + "projects", + headers=config["headers"], + data={"name": repo_name, "visibility": "public"}, + ) response.raise_for_status() - assert(response.json()["path_with_namespace"] == "ingress/" + repo_name) + assert response.json()["path_with_namespace"] == "ingress/" + repo_name gl_internal_repo_url = response.json()["ssh_url_to_repo"] # Set the remote - subprocess.run(["git", "remote", "add", "gitlab-internal", - gl_internal_repo_url], cwd=repo_name, check=True) + subprocess.run( + ["git", "remote", "add", "gitlab-internal", gl_internal_repo_url], + cwd=repo_name, + check=True, + ) # Force push current contents of all branches - subprocess.run(["git", "push", "--force", "--all", - "gitlab-internal"], cwd=repo_name, check=True) + subprocess.run( + ["git", "push", "--force", "--all", "gitlab-internal"], + cwd=repo_name, + check=True, + ) def get_request(endpoint, headers, params=None): + """Wrapper around requests.get that returns a JSON if request was successful + or raises a HTTPError otherwise. 
+ + Parameters + ---------- + endpoint : str + URL of API endpoint to call + headers : dict + Request headers + params : dict, optional + Request parameters + + Returns + ------- + dict + JSON of request result + + Raises + ------ + requests.HTTPError + If not r.ok, raise HTTPError with details of failure + """ r = requests.get(endpoint, headers=headers, params=params) if r.ok: return r.json() else: - raise ValueError( + raise requests.HTTPError( f"Request failed: URL {endpoint}, CODE {r.status_code}, CONTENT {r.content}" ) def put_request(endpoint, headers, params=None): + """Wrapper around requests.put that returns a JSON if request was successful + or raises a HTTPError otherwise. + + Parameters + ---------- + endpoint : str + URL of API endpoint to call + headers : dict + Request headers + params : dict, optional + Request parameters + + Returns + ------- + dict + JSON of request result + + Raises + ------ + requests.HTTPError + If not r.ok, raise HTTPError with details of failure + """ r = requests.put(endpoint, headers=headers, params=params) if r.ok: return r.json() else: - raise ValueError( + raise requests.HTTPError( f"Request failed: URL {endpoint}, CODE {r.status_code}, CONTENT {r.content}" ) def get_gitlab_config(server="external"): + """Get gitlab server details and user account secrets + + Parameters + ---------- + server : str, optional + Which server to get secrets for either "internal" or, by default "external" + + Returns + ------- + dict + Secrets api_url, api_token, ip and headers. 
+ + Raises + ------ + ValueError + If server is not a supported value + """ home = str(Path.home()) if server == "external": with open(f"{home}/.secrets/gitlab-external-ip-address", "r") as f: - ip = f.readlines()[0].strip() + ip = f.readlines()[0].strip() with open(f"{home}/.secrets/gitlab-external-api-token", "r") as f: - token = f.readlines()[0].strip() + token = f.readlines()[0].strip() elif server == "internal": with open(f"{home}/.secrets/gitlab-internal-ip-address", "r") as f: ip = f.readlines()[0].strip() @@ -139,6 +244,25 @@ def get_gitlab_config(server="external"): def get_group_id(group_name, config): + """Get the ID of a group on a gitlab server. + + Parameters + ---------- + group_name : str + Group name to find. + config : dict + Gitlab details and secrets as returned by get_gitlab_config + + Returns + ------- + int + Group ID for group_name + + Raises + ------ + ValueError + If group_name not found in the groups returned from the gitlab server. + """ endpoint = config["api_url"] + "groups" response = get_request(endpoint, headers=config["headers"]) for group in response: @@ -148,12 +272,39 @@ def get_group_id(group_name, config): def get_project(project_id, config): + """Get the details of a project from its ID. + + Parameters + ---------- + project_id : int + ID of the project on the gitlab server. + config : dict + Gitlab details and secrets as returned by get_gitlab_config + + Returns + ------- + dict + Project JSON as returned by the gitlab API. + """ endpoint = config["api_url"] + f"projects/{project_id}" project = get_request(endpoint, headers=config["headers"]) return project def get_merge_requests_for_approval(config): + """Get the details of all open merge requests into the approval group on + a gitlab server. + + Parameters + ---------- + config : dict + Gitlab details and secrets as returned by get_gitlab_config + + Returns + ------- + list + List of merge requests JSONs as returned by the gitlab API. 
+ """ group = get_group_id("approval", config) endpoint = config["api_url"] + f"/groups/{group}/merge_requests" response = get_request( @@ -163,6 +314,22 @@ def get_merge_requests_for_approval(config): def count_unresolved_mr_discussions(mr, config): + """Count the number of unresolved discussions a merge request has. Requires + calling the discussions API endpoint for the merge request to determine + each comment's resolved status. + + Parameters + ---------- + mr : dict + A merge request JSON as returned by the gitlab API + config : dict + Gitlab details and secrets as returned by get_gitlab_config + + Returns + ------- + int + Number of unresolved discussions. + """ if mr["user_notes_count"] == 0: return 0 project_id = mr["project_id"] @@ -183,6 +350,22 @@ def count_unresolved_mr_discussions(mr, config): def accept_merge_request(mr, config): + """Accept a merge request + + Parameters + ---------- + mr : dict + For the merge request to approve: The merge request JSON as returned by + the gitlab API. + config : dict + Gitlab details and secrets as returned by get_gitlab_config + + Returns + ------- + dict + JSON response from gitlab API representing the status of the accepted + merge request. + """ project_id = mr["project_id"] mr_iid = mr["iid"] endpoint = ( @@ -192,17 +375,22 @@ def accept_merge_request(mr, config): def check_merge_requests(): + """Main function to check merge requests in the approval group on gitlab external, + approve them where appropriate, and then push the approved repos to gitlab + internal. 
+ """ logger.info(f"STARTING RUN") try: - config = get_gitlab_config(server="external") + config_external = get_gitlab_config(server="external") + config_internal = get_gitlab_config(server="internal") except Exception as e: logger.critical(f"Failed to load gitlab secrets: {e}") return logger.info("Getting open merge requests for approval") try: - merge_requests = get_merge_requests_for_approval(config) + merge_requests = get_merge_requests_for_approval(config_external) except Exception as e: logger.critical(f"Failed to get merge requests: {e}") return @@ -212,19 +400,25 @@ def check_merge_requests(): logger.info("-" * 20) logger.info(f"Merge request {i+1} out of {len(merge_requests)}") try: - source_project = get_project(mr["source_project_id"], config) + # Extract merge request details + source_project = get_project(mr["source_project_id"], config_external) logger.info(f"Source Project: {source_project['name_with_namespace']}") logger.info(f"Source Branch: {mr['source_branch']}") - target_project = get_project(mr["project_id"], config) + target_project = get_project(mr["project_id"], config_external) logger.info(f"Target Project: {target_project['name_with_namespace']}") logger.info(f"Target Branch: {mr['target_branch']}") logger.info(f"Commit SHA: {mr['sha']}") logger.info(f"Created At: {mr['created_at']}") status = mr["merge_status"] - logger.info(f"Merge Status: {status}") + if status != "can_be_merged": + # Should never get merge conflicts so if we do something has + # gone wrong - log an error + logger.error(f"Merge Status: {status}") + else: + logger.info(f"Merge Status: {status}") wip = mr["work_in_progress"] logger.info(f"Work in Progress: {wip}") - unresolved = count_unresolved_mr_discussions(mr, config) + unresolved = count_unresolved_mr_discussions(mr, config_external) logger.info(f"Unresolved Discussions: {unresolved}") upvotes = mr["upvotes"] logger.info(f"Upvotes: {upvotes}") @@ -242,13 +436,14 @@ def check_merge_requests(): ): logger.info("Merge 
request has been approved. Proceeding with merge.") try: - result = accept_merge_request(mr, config) + result = accept_merge_request(mr, config_external) except Exception as e: logger.error(f"Merge failed! {e}") continue if result["state"] == "merged": logger.info(f"Merge successful! Merge SHA {result['merge_commit_sha']}") try: + # Save details of accepted merge request to separate log file with open("accepted_merge_requests.log", "a") as f: f.write( f"{result['merged_at']}, {source_project['name_with_namespace']}, {mr['source_branch']}, {mr['sha']}, {target_project['name_with_namespace']}, {mr['target_branch']}, {result['merge_commit_sha']}\n" @@ -256,9 +451,11 @@ def check_merge_requests(): except Exception as e: logger.error(f"Failed to log accepted merge request: {e}") try: + logger.info("Pushing project to gitlab internal.") internal_update_repo( target_project["ssh_url_to_repo"], - target_project["name"] + target_project["name"], + config_internal, ) except Exception as e: logger.error(f"Failed to push to internal: {e}") From a3c03234b7321b039a1c4c14da2714da5f583e35 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 11 May 2020 15:45:15 +0100 Subject: [PATCH 034/155] Add crontab entries for GitLab scripts --- .../cloud_init/cloud-init-gitlab-external.template.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index d4552004f7..97df17b1f3 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -214,6 +214,13 @@ runcmd: # Give ownership of their home directory - | chown -R : "/home/"; + # -------------------------------- + # ADD CRONTAB ENTRIES FOR GITLAB SCRIPTS + # -------------------------------- + - echo "*** 
Adding zipfile_to_gitlab_project.py to crontab ***" - echo "*/10 * * * * /home//zipfile_to_gitlab_project.py" >> /etc/crontab - echo "*** Adding check_merge_requests.py to crontab ***" - echo "5,15,25,35,45,55 * * * * /home//check_merge_requests.py" >> /etc/crontab # Shutdown so that we can tell when the job has finished by polling the VM state power_state: From f7dfccca4b9883fd2d65ade1946a18b75072c559 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Mon, 11 May 2020 16:21:48 +0100 Subject: [PATCH 035/155] Switched gitlab airlock container name back to something simple as this does not have to be globally unique --- .../SRE_Upload_Git_Repo_to_GitlabExternal.ps1 | 2 +- deployment/common/Configuration.psm1 | 4 +++- environment_configs/full/sre_testasandbox_full_config.json | 6 ++++-- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 index 1c4ec42a70..ecb0f66c1e 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 @@ -61,7 +61,7 @@ $sreStorageAccountName = $config.sre.storage.artifacts.accountName $sreStorageAccount = Deploy-StorageAccount -Name $sreStorageAccountName -ResourceGroupName $resourceGroupName -Location $config.sre.location # Create container if not already there -$containerName = $config.sre.storage.artifacts.gitlabAirlockContainerName +$containerName = $config.sre.storage.artifacts.containers.gitlabAirlockName Add-LogMessage -Level Info "Creating blob storage container $containerName in storage account $sreStorageAccountName ..."
$_ = Deploy-StorageContainer -Name $containerName -StorageAccount $sreStorageAccount # delete existing blobs on the container diff --git a/deployment/common/Configuration.psm1 b/deployment/common/Configuration.psm1 index 1fa6e3a738..e62d2dc470 100644 --- a/deployment/common/Configuration.psm1 +++ b/deployment/common/Configuration.psm1 @@ -375,7 +375,9 @@ function Add-SreConfig { artifacts = [ordered]@{ rg = $storageRg accountName = "sre$($shm.id)artifacts${storageSuffix}".ToLower() | TrimToLength 24 - gitlabAirlockContainerName = "sre$($shm.id)gitlabairlock${storageSuffix}" + containers = [ordered]@{ + gitlabAirlockName = "gitlabairlock" + } } bootdiagnostics = [ordered]@{ rg = $storageRg diff --git a/environment_configs/full/sre_testasandbox_full_config.json b/environment_configs/full/sre_testasandbox_full_config.json index fcb5040bf9..48f7e0bfc9 100644 --- a/environment_configs/full/sre_testasandbox_full_config.json +++ b/environment_configs/full/sre_testasandbox_full_config.json @@ -207,8 +207,10 @@ "storage": { "artifacts": { "rg": "RG_SRE_ARTIFACTS", - "accountName": "sreartifactszvmwtqkigrad", - "gitlabAirlockContainerName": "gitlabairlock" + "accountName": "sreartifactszvmwtqkigrld", + "containers": { + "gitlabAirlockName": "gitlabairlock" + } }, "bootdiagnostics": { "rg": "RG_SRE_ARTIFACTS", From 35506382481a9bc8fcae3b475289833a2be41242 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Tue, 12 May 2020 15:35:37 +0100 Subject: [PATCH 036/155] Fixed typo --- .../secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 index 44105a39bf..bb59d9943c 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 @@ -339,7 +339,7 @@ $params = @{ gatewayHostname = 
"`"$($config.sre.rds.gateway.hostname)`"" sh1Hostname = "`"$($config.sre.rds.sessionHost1.hostname)`"" sh2Hostname = "`"$($config.sre.rds.sessionHost2.hostname)`"" - sh3Hostname = "`"$($config.sre.rds.sessionHost2.hostname)`"" + sh3Hostname = "`"$($config.sre.rds.sessionHost3.hostname)`"" } $result = Invoke-RemoteScript -Shell "PowerShell" -ScriptPath $scriptPath -VMName $config.shm.dc.vmName -ResourceGroupName $config.shm.dc.rg -Parameter $params Write-Output $result.Value From e4d16e259ccce0767f60d89f68e00444753469b3 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Fri, 15 May 2020 11:20:04 +0100 Subject: [PATCH 037/155] Added explicit ConnectionBroker argument as first argument to all RD commands --- .../Deploy_RDS_Environment.template.ps1 | 33 +++++++++---------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 index b36df54886..fdefbbc41f 100644 --- a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 +++ b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 @@ -21,12 +21,12 @@ Start-Service ShellHWDetection Write-Output "Removing any old RDS settings..." foreach ($collection in $(Get-RDSessionCollection -ErrorAction SilentlyContinue)) { Write-Output "... removing existing RDSessionCollection: '$($collection.CollectionName)'" - Remove-RDSessionCollection -CollectionName $collection.CollectionName -Force -ErrorAction SilentlyContinue + Remove-RDSessionCollection -ConnectionBroker "" -CollectionName $collection.CollectionName -Force -ErrorAction SilentlyContinue } foreach ($server in $(Get-RDServer -ErrorAction SilentlyContinue)) { Write-Output "... 
removing existing RDServer: '$($server.Server)'" foreach ($role in $server.Roles) { - Remove-RDServer -Server $server.Server -Role $role -Force -ErrorAction SilentlyContinue + Remove-RDServer -ConnectionBroker "" -Server $server.Server -Role $role -Force -ErrorAction SilentlyContinue } } @@ -37,12 +37,12 @@ Write-Output "Creating RDS Environment..." try { # Setup licensing server New-RDSessionDeployment -ConnectionBroker "" -WebAccessServer "" -SessionHost @("", "", "") -ErrorAction Stop - Add-RDServer -Server -Role RDS-LICENSING -ConnectionBroker -ErrorAction Stop - Set-RDLicenseConfiguration -LicenseServer -Mode PerUser -ConnectionBroker -Force -ErrorAction Stop + Add-RDServer -ConnectionBroker "" -Server "" -Role RDS-LICENSING -ErrorAction Stop + Set-RDLicenseConfiguration -ConnectionBroker "" -LicenseServer "" -Mode PerUser -Force -ErrorAction Stop # Setup gateway server $_ = Add-WindowsFeature -Name RDS-Gateway -IncludeAllSubFeature -ErrorAction Stop - Add-RDServer -Server -Role RDS-GATEWAY -ConnectionBroker -GatewayExternalFqdn -ErrorAction Stop - Set-RDWorkspace -Name "Safe Haven Applications" -ConnectionBroker + Add-RDServer -ConnectionBroker "" -Server "" -Role RDS-GATEWAY -GatewayExternalFqdn "" -ErrorAction Stop + Set-RDWorkspace -ConnectionBroker "" -Name "Safe Haven Applications" Write-Output " [o] RDS environment configuration update succeeded" } catch { Write-Output " [x] RDS environment configuration update failed!" 
@@ -69,9 +69,9 @@ foreach($rdsConfiguration in @(("Applications", "", " -ErrorAction Stop - $_ = Set-RDSessionCollectionConfiguration -CollectionName "$collectionName" -UserGroup "\$userGroup" -ClientPrinterRedirected $false -ClientDeviceRedirectionOptions None -DisconnectedSessionLimitMin 5 -IdleSessionLimitMin 720 -ConnectionBroker -ErrorAction Stop - $_ = Set-RDSessionCollectionConfiguration -CollectionName "$collectionName" -EnableUserProfileDisk -MaxUserProfileDiskSizeGB "20" -DiskPath "\\\$shareName" -ConnectionBroker -ErrorAction Stop + $_ = New-RDSessionCollection -ConnectionBroker "" -CollectionName "$collectionName" -SessionHost "$sessionHost" -ErrorAction Stop + $_ = Set-RDSessionCollectionConfiguration -ConnectionBroker "" -CollectionName "$collectionName" -UserGroup "\$userGroup" -ClientPrinterRedirected $false -ClientDeviceRedirectionOptions None -DisconnectedSessionLimitMin 5 -IdleSessionLimitMin 720 -ErrorAction Stop + $_ = Set-RDSessionCollectionConfiguration -ConnectionBroker "" -CollectionName "$collectionName" -EnableUserProfileDisk -MaxUserProfileDiskSizeGB "20" -DiskPath "\\\$shareName" -ErrorAction Stop Write-Output " [o] Creating '$collectionName' collection succeeded" } catch { Write-Output " [x] Creating '$collectionName' collection failed!" 
@@ -85,14 +85,13 @@ foreach($rdsConfiguration in @(("Applications", "", ".151" -CollectionName "Review" -ConnectionBroker -ErrorAction Stop - $_ = New-RDRemoteApp -Alias "mstsc (1)" -DisplayName "DSVM Main (Desktop)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "-v .160" -CollectionName "Applications" -ConnectionBroker -ErrorAction Stop - $_ = New-RDRemoteApp -Alias "mstsc (2)" -DisplayName "DSVM Other (Desktop)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CollectionName "Applications" -ConnectionBroker -ErrorAction Stop - $_ = New-RDRemoteApp -Alias "chrome (2)" -DisplayName "GitLab" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151" -CollectionName "Applications" -ConnectionBroker -ErrorAction Stop - $_ = New-RDRemoteApp -Alias "chrome (3)" -DisplayName "HackMD" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.152:3000" -CollectionName "Applications" -ConnectionBroker -ErrorAction Stop - $_ = New-RDRemoteApp -Alias "putty (1)" -DisplayName "SSH (DSVM Main)" -FilePath "C:\Program Files\PuTTY\putty.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "-ssh .160" -CollectionName "Applications" -ConnectionBroker -ErrorAction Stop - $_ = New-RDRemoteApp -Alias "putty (2)" -DisplayName "SSH (DSVM Other)" -FilePath "C:\Program Files\PuTTY\putty.exe" -ShowInWebAccess 1 -CollectionName "Applications" -ConnectionBroker -ErrorAction Stop - # $_ = New-RDRemoteApp -Alias "WinSCP" -DisplayName "File Transfer" -FilePath "C:\Program Files (x86)\WinSCP\WinSCP.exe" -ShowInWebAccess 1 -CollectionName "Applications" -ConnectionBroker -ErrorAction Stop + $_ = New-RDRemoteApp -ConnectionBroker "" -Alias "chrome (1)" -DisplayName "Code Review" -FilePath "C:\Program Files 
(x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151" -CollectionName "Review" -ErrorAction Stop + $_ = New-RDRemoteApp -ConnectionBroker "" -Alias "mstsc (1)" -DisplayName "DSVM Main (Desktop)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "-v .160" -CollectionName "Applications" -ErrorAction Stop + $_ = New-RDRemoteApp -ConnectionBroker "" -Alias "mstsc (2)" -DisplayName "DSVM Other (Desktop)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CollectionName "Applications" -ErrorAction Stop + $_ = New-RDRemoteApp -ConnectionBroker "" -Alias "chrome (2)" -DisplayName "GitLab" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151" -CollectionName "Applications" -ErrorAction Stop + $_ = New-RDRemoteApp -ConnectionBroker "" -Alias "chrome (3)" -DisplayName "HackMD" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.152:3000" -CollectionName "Applications" -ErrorAction Stop + $_ = New-RDRemoteApp -ConnectionBroker "" -Alias "putty (1)" -DisplayName "SSH (DSVM Main)" -FilePath "C:\Program Files\PuTTY\putty.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "-ssh .160" -CollectionName "Applications" -ErrorAction Stop + $_ = New-RDRemoteApp -ConnectionBroker "" -Alias "putty (2)" -DisplayName "SSH (DSVM Other)" -FilePath "C:\Program Files\PuTTY\putty.exe" -ShowInWebAccess 1 -CollectionName "Applications" -ErrorAction Stop Write-Output " [o] Registering applications succeeded" } catch { Write-Output " [x] Registering applications failed!" 
From bfe714766b94d62defb4883076468c34727215b4 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Fri, 15 May 2020 11:27:56 +0100 Subject: [PATCH 038/155] Remove unused Set-RDPublishedName script --- .../templates/Set-RDPublishedName.ps1 | 77 ------------------- .../setup/Setup_SRE_VNET_RDS.ps1 | 1 - 2 files changed, 78 deletions(-) delete mode 100644 deployment/secure_research_environment/remote/create_rds/templates/Set-RDPublishedName.ps1 diff --git a/deployment/secure_research_environment/remote/create_rds/templates/Set-RDPublishedName.ps1 b/deployment/secure_research_environment/remote/create_rds/templates/Set-RDPublishedName.ps1 deleted file mode 100644 index 7f3627fae7..0000000000 --- a/deployment/secure_research_environment/remote/create_rds/templates/Set-RDPublishedName.ps1 +++ /dev/null @@ -1,77 +0,0 @@ -[CmdletBinding()] -Param( - [Parameter(Mandatory=$True,HelpMessage="Specifies the FQDN that clients will use when connecting to the deployment.",Position=1)] - [string]$ClientAccessName, - [Parameter(Mandatory=$False,HelpMessage="Specifies the RD Connection Broker server for the deployment.",Position=2)] - [string]$ConnectionBroker="localhost" -) - -$CurrentUser = New-Object Security.Principal.WindowsPrincipal $([Security.Principal.WindowsIdentity]::GetCurrent()) -If (($CurrentUser.IsInRole([Security.Principal.WindowsBuiltinRole]::Administrator)) -eq $false) -{ - $ArgumentList = "-noprofile -noexit -file `"{0}`" -ClientAccessName $ClientAccessName -ConnectionBroker $ConnectionBroker" - Start-Process powershell.exe -Verb RunAs -ArgumentList ($ArgumentList -f ($MyInvocation.MyCommand.Definition)) - Exit -} - -Function Get-RDMSDeployStringProperty ([string]$PropertyName, [string]$BrokerName) -{ - $ret = iwmi -Class "Win32_RDMSDeploymentSettings" -Namespace "root\CIMV2\rdms" -Name "GetStringProperty" ` - -ArgumentList @($PropertyName) -ComputerName $BrokerName ` - -Authentication PacketPrivacy -ErrorAction Stop - Return $ret.Value -} - -Try -{ - If 
((Get-RDMSDeployStringProperty "DatabaseConnectionString" $ConnectionBroker) -eq $null) {$BrokerInHAMode = $False} Else {$BrokerInHAMode = $True} -} -Catch [System.Management.ManagementException] -{ - If ($Error[0].Exception.ErrorCode -eq "InvalidNamespace") - { - If ($ConnectionBroker -eq "localhost") - { - Write-Host "`n Set-RDPublishedName Failed.`n`n The local machine does not appear to be a Connection Broker. Please specify the`n FQDN of the RD Connection Broker using the -ConnectionBroker parameter.`n" -ForegroundColor Red - } - Else - { - Write-Host "`n Set-RDPublishedName Failed.`n`n $ConnectionBroker does not appear to be a Connection Broker. Please make sure you have `n specified the correct FQDN for your RD Connection Broker server.`n" -ForegroundColor Red - } - } - Else - { - $Error[0] - } - Exit -} - -$OldClientAccessName = Get-RDMSDeployStringProperty "DeploymentRedirectorServer" $ConnectionBroker - -If ($BrokerInHAMode.Value) -{ - Import-Module RemoteDesktop - Set-RDClientAccessName -ConnectionBroker $ConnectionBroker -ClientAccessName $ClientAccessName -} -Else -{ - $return = iwmi -Class "Win32_RDMSDeploymentSettings" -Namespace "root\CIMV2\rdms" -Name "SetStringProperty" ` - -ArgumentList @("DeploymentRedirectorServer",$ClientAccessName) -ComputerName $ConnectionBroker ` - -Authentication PacketPrivacy -ErrorAction Stop - $wksp = (gwmi -Class "Win32_Workspace" -Namespace "root\CIMV2\TerminalServices" -ComputerName $ConnectionBroker) - $wksp.ID = $ClientAccessName - $wksp.Put()|Out-Null -} - -$CurrentClientAccessName = Get-RDMSDeployStringProperty "DeploymentRedirectorServer" $ConnectionBroker - -If ($CurrentClientAccessName -eq $ClientAccessName) -{ - Write-Host "`n Set-RDPublishedName Succeeded." 
-ForegroundColor Green - Write-Host "`n Old name: $OldClientAccessName`n`n New name: $CurrentClientAccessName" - Write-Host "`n If you are currently logged on to RD Web Access, please refresh the page for the change to take effect.`n" -} -Else -{ - Write-Host "`n Set-RDPublishedName Failed.`n" -ForegroundColor Red -} \ No newline at end of file diff --git a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 index bb59d9943c..444afc84a4 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 @@ -279,7 +279,6 @@ if ($?) { Add-LogMessage -Level Info "[ ] Uploading RDS gateway scripts to storage account '$($sreStorageAccount.StorageAccountName)'" Set-AzStorageBlobContent -Container $containerNameGateway -Context $sreStorageAccount.Context -File $deployScriptLocalFilePath -Blob "Deploy_RDS_Environment.ps1" -Force Set-AzStorageBlobContent -Container $containerNameGateway -Context $sreStorageAccount.Context -File $serverListLocalFilePath -Blob "ServerList.xml" -Force -Set-AzStorageBlobContent -Container $containerNameGateway -Context $sreStorageAccount.Context -File (Join-Path $PSScriptRoot ".." "remote" "create_rds" "templates" "Set-RDPublishedName.ps1") -Blob "Set-RDPublishedName.ps1" -Force if ($?) 
{ Add-LogMessage -Level Success "File uploading succeeded" } else { From f5eb49635fba223b1a91ee5ddc5999bd41e75b35 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Fri, 15 May 2020 11:58:56 +0100 Subject: [PATCH 039/155] Remove hard-coded drive letters --- .../templates/Deploy_RDS_Environment.template.ps1 | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 index fdefbbc41f..6ddd1e7bef 100644 --- a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 +++ b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 @@ -52,9 +52,10 @@ try { # Create collections # ------------------ -foreach($rdsConfiguration in @(("Applications", "", "", "F:\AppFileShares"), - ("Windows (Desktop)", "", "", "G:\RDPFileShares"), - ("Review", "", "", "H:\ReviewFileShares"))) { +$driveLetters = Get-Volume | Where-Object { $_.FileSystemLabel -Like "DATA-[0-9]" } | ForEach-Object { $_.DriveLetter } | Sort +foreach($rdsConfiguration in @(("Applications", "", "", "$($driveLetters[0]):\AppFileShares"), + ("Windows (Desktop)", "", "", "$($driveLetters[1]):\RDPFileShares"), + ("Review", "", "", "$($driveLetters[2]):\ReviewFileShares"))) { $collectionName, $sessionHost, $userGroup, $sharePath = $rdsConfiguration # Setup user profile disk shares From 2e45544ad67288a762cdaef1ff4d2cb2bc57d198 Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Thu, 14 May 2020 14:16:52 +0100 Subject: [PATCH 040/155] refactor script to upload git project to gitlab-external (git clone before pushing) --- .../scripts/zipfile_to_gitlab_project.py | 327 +++++++++++------- 1 file changed, 204 insertions(+), 123 deletions(-) diff --git 
a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 449b20486c..04d2f58056 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -9,11 +9,15 @@ 1) get useful gitlab stuff (url, api key, namespace_ids for our groups) 2) unzip zipfiles in specified directory 3) loop over unzipped repos. For each one: - a) see if "unapproved" project with same name exists, if not, create it - b) commit and push to "unapproved" project, branch=commit_hash - c) see "approval" project with same name exists, if not, create it - d) create branch=desired_branch_name on "approval" project - e) create merge request from unapproved/repo_name/commit_hash to + a) see if "approval" project with same name exists, if not, create it, and branch "" + b) check if merge request to "approval/" with source and target branches + "commit-" and "" already exists. + If so, skip to the next unzipped repo. + b) see if "unapproved" project with same name exists, if not, fork "approval" one + c) clone "unapproved" project, and create branch called "commit-" + d) copy in contents of unzipped repo. + e) git add, commit and push to "unapproved" project + f) create merge request from unapproved/repo_name/commit_hash to approval/repo_name/desired_branch_name 4) clean up - remove zipfiles and unpacked repos. 
""" @@ -43,12 +47,12 @@ logger.addHandler(c_handler) -def unzip_zipfiles(zipfile_dir, tmp_repo_dir): +def unzip_zipfiles(zipfile_dir, tmp_unzipped_dir): """ Parameters ========== zipfile_dir: str, path to directory containing zipfiles - tmp_repo_dir: str, path to directory where zipfiles will be unzipped + tmp_unzipped_dir: str, path to directory where zipfiles will be unzipped Returns ======= @@ -61,14 +65,14 @@ def unzip_zipfiles(zipfile_dir, tmp_repo_dir): output_list = [] repo_commit_regex = re.compile("([-\w]+)_([a-f\d]+)_([\S]+).zip") # tear down and recreate the directory where we will put the unpacked zip - shutil.rmtree(tmp_repo_dir, ignore_errors=True) - os.makedirs(tmp_repo_dir) + shutil.rmtree(tmp_unzipped_dir, ignore_errors=True) + os.makedirs(tmp_unzipped_dir) # look in a directory for zipfiles zipfiles = os.listdir(zipfile_dir) for zipfile in zipfiles: filename_match = repo_commit_regex.search(zipfile) if not filename_match: - print("Badly named zipfile! {}".format(zipfile)) + logger.info("Badly named zipfile! 
{}".format(zipfile)) continue repo_name, commit_hash, branch = filename_match.groups() @@ -76,13 +80,13 @@ def unzip_zipfiles(zipfile_dir, tmp_repo_dir): try: zipfile_path = os.path.join(zipfile_dir, zipfile) with ZipFile(zipfile_path, 'r') as zip_obj: - zip_obj.extractall(path=tmp_repo_dir) + zip_obj.extractall(path=tmp_unzipped_dir) # we should have made a new directory - find its name - unpacked_zips = os.listdir(tmp_repo_dir) - unpacked_location = os.path.join(tmp_repo_dir, unpacked_zips[0]) + unpacked_zips = os.listdir(tmp_unzipped_dir) + unpacked_location = os.path.join(tmp_unzipped_dir, unpacked_zips[0]) output_list.append((repo_name, commit_hash, branch, unpacked_location)) except(BadZipFile): - print("Bad zipfile: {}".format(zipfile)) + logger.info("Bad zipfile: {}".format(zipfile)) continue return output_list @@ -300,7 +304,7 @@ def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): ) assert(response.json()["name"] == repo_name) project_info = response.json() - print("Created project {} in namespace {}, project_id {}".\ + logger.info("Created project {} in namespace {}, project_id {}".\ format(repo_name, namespace_id, project_info["id"])) return project_info @@ -373,9 +377,7 @@ def create_branch(branch_name, return branch_info -def check_if_merge_request_exists(repo_name, - source_project_id, - source_branch, +def check_if_merge_request_exists(source_branch, target_project_id, target_branch, gitlab_url, gitlab_token): @@ -385,10 +387,6 @@ def check_if_merge_request_exists(repo_name, Parameters ========== - repo_name: str, name of the repository - source_project_id: int, project_id for the unapproved project, obtainable - as the "ID" field of the json returned from the - projects API endpoint. source_branch: str, name of the branch on source project, will typically be the commit_hash from the original repo. target_project_id: int, project_id for the "approval" group's project. 
@@ -410,7 +408,7 @@ def check_if_merge_request_exists(repo_name, for mr in response.json(): if mr["source_branch"] == source_branch and \ mr["target_branch"] == target_branch: - print("Merge request {} -> {} already exists".\ + logger.info("Merge request {} -> {} already exists".\ format(source_branch, target_branch)) return True return False @@ -471,7 +469,7 @@ def create_merge_request(repo_name, # response.content)) ##### TEMPORARY - don't raise an error here - we get 500 status code ##### even though MR is created it - under investigation. - print("Problem creating Merge Request {} {} {}: {}"\ + logger.info("Problem creating Merge Request {} {} {}: {}"\ .format(repo_name, source_branch,target_branch, response.content)) return {} @@ -479,7 +477,7 @@ def create_merge_request(repo_name, return mr_info -def push_to_remote(path_to_unzipped_repo, branch_name, remote_url): +def clone_commit_and_push(repo_name, path_to_unzipped_repo, tmp_repo_dir, branch_name, remote_url): """ Run shell commands to convert the unzipped directory containing the repository contents into a git repo, then commit it to a branch named @@ -487,81 +485,86 @@ def push_to_remote(path_to_unzipped_repo, branch_name, remote_url): Parameters ========== + repo_name: str, name of the repository/project path_to_unzipped_repo: str, the full directory path to the unzipped repo + tmp_repo_dir: str, path to a temporary dir where we will clone the project branch_name: str, original commit hash from the external git repo, will be used as the name of the branch to push to remote_url: str, the URL for this project on gitlab-external to be added as a "remote". 
""" - subprocess.run(["git","init"], cwd=path_to_unzipped_repo, check=True) + # Clone the repo + subprocess.run(["git","clone",remote_url],cwd=tmp_repo_dir, check=True) + working_dir = os.path.join(tmp_repo_dir, repo_name) + assert os.path.exists(working_dir) + # Copy the unzipped repo contents into our cloned (empty) repo + subprocess.run("cp","-r",os.path.join(path_to_unzipped_repo,"*"),".", + cwd=working_dir, check=True) # Create a branch named after the original commit hash subprocess.run(["git","checkout","-b",branch_name], - cwd=path_to_unzipped_repo, check=True) + cwd=working_dir, check=True) # Commit everything to this branch, also putting commit hash into message - subprocess.run(["git","add","."], cwd=path_to_unzipped_repo, check=True) + subprocess.run(["git","add","."], cwd=working_dir, check=True) commit_msg = "Committing to branch {}".format(branch_name) subprocess.run(["git","commit","-m", commit_msg], - cwd=path_to_unzipped_repo, check=True) - # add the remote_url as a remote called 'gitlab-external' - subprocess.run(["git","remote","add","gitlab-external",remote_url], - cwd=path_to_unzipped_repo, check=True) - # Push to gitlab external - subprocess.run(["git","push","--force","--all","gitlab-external"], - cwd=path_to_unzipped_repo, check=True) - - -def create_and_push_unapproved_project(repo_name, - namespace_id, - gitlab_url, - gitlab_token, - path_to_unzipped_repo, - branch_name): - """ - We have unzipped a zipfile, and put the contents (i.e. the code we want - to push) in path_to_unzipped_project. - Now we create the project in the "unapproved" group on Gitlab, and push - to it. 
+ cwd=working_dir, check=True) + # Push back to gitlab external + subprocess.run(["git","push","--force","--all","--set-upstream","origin",branch_name], + cwd=working_dir, check=True) + + +def fork_project(repo_name, project_id, namespace_id, + gitlab_url, gitlab_token): + """ + Fork the project 'approval/' to 'unapproved/' + after first checking whether the latter exists. Parameters ========== - repo_name: str, name of our repository/project - gitlab_url: str, the base URL of Gitlab API + repo_name: str, name of the repo/project + project_id: int, project id of the 'approval/' project + namespace_id: int, id of the 'unapproved' namespace + gitlab_url: str, str, the base URL of Gitlab API gitlab_token: str, API token for Gitlab API - path_to_unzipped_repo: str, full directory path to code we want to commit - branch_name: str, the commit hash from the original repo, to be used as - the name of the branch we'll push to Returns ======= - project_id: int, ID of the project as returned by projects API endpoint + new_project_id: int, the id of the newly created 'unapproved/' project """ - # Get project ID - project will be created if it didn't already exist - project_id = get_project_id(repo_name, namespace_id, gitlab_url, gitlab_token) - assert project_id - # see if branch already exists with name=commit_hash - branch_exists = check_if_branch_exists(branch_name, - project_id, - gitlab_url, - gitlab_token) - if branch_exists: - print("Branch {} already exists".format(branch_name)) - # already exists - do nothing - return project_id - # otherwise we need to commit code to it and push - remote_url = get_project_remote_url(repo_name, namespace_id, + already_exists = check_if_project_exists(repo_name, namespace_id, gitlab_url, gitlab_token) + if not already_exists: + fork_url = "{}/projects/{}/fork".format(gitlab_url, project_id) + response = requests.post(fork_url, + headers = {"Authorization": "Bearer "+gitlab_token}, + data = {"namespace_id": namespace_id}) + if 
response.status_code != 201: + raise RuntimeError("Problem creating fork: {}".format(response.content)) + new_project_id = response.json()["id"] + return new_project_id + else: + # project already exists - ensure it is a fork of 'approval/' + new_project_id = get_project_id(repo_name, namespace_id, gitlab_url, gitlab_token) - print("remote URL for {} is {}".format(repo_name, remote_url)) + fork_url = "{}/projects/{}/fork/{}".format(gitlab_url, + new_project_id, + project_id) + response = requests.post(fork_url, + headers = {"Authorization": "Bearer "+gitlab_token}) + # status code 201 if fork relationship created, or 409 if already there + if (response.status_code != 201) and (response.status_code != 409): + raise RuntimeError("Unable to create fork relationship: {} {}".\ + format(response.status_code, response.content)) + + return new_project_id - push_to_remote(path_to_unzipped_repo, branch_name, remote_url) - # Return the project_id, to use in merge request - return project_id -def create_approved_project_branch(repo_name, - branch_name, - namespace_id, - gitlab_url, - gitlab_token): +def create_project_and_branch(repo_name, + branch_name, + namespace_id, + gitlab_url, + gitlab_token): + """ Create a new branch (and a new project if it doesn't already exist) owned by the "approval" group. This will be the target for the merge @@ -597,17 +600,128 @@ def create_approved_project_branch(repo_name, return project_id +def unzipped_repo_to_merge_request(repo_details, + tmp_repo_dir, + gitlab_config, + namespace_ids, + group_names): + """ + Go through all the steps for a single repo/project. + + Parameters + ========== + repo_details: tuple of strings, (repo_name, hash, desired_branch, location) + tmp_repo_dir: str, directory where we will clone the repo, then copy the contents in + gitlab_config: dict, contains api url and token + namespace_ids; dict, keys are the group names (e.g. 
"unapproved", "approval", values + are the ids of the corresponding namespaces in Gitlab + group_names: list of strings, typically ["unapproved", "approval"] + """ + + # unpack tuple + repo_name, commit_hash, branch_name, unzipped_location = repo_details + logger.info("Unpacked {} {} {}".format(repo_name, commit_hash, branch_name)) + # create project and branch on approved repo + target_project_id = create_project_and_branch(repo_name, + branch_name, + namespace_ids[group_names[1]], + gitlab_config["api_url"], + gitlab_config["api_token"]) + logger.info("Created project {}/{} branch {}".\ + format(group_names[1],repo_name, branch_name)) + + # Check if we already have a Merge Request - if so we can just skip to the end + unapproved_branch_name = "branch-{}".format(commit_hash) + mr_exists = check_if_merge_request_exists(unapproved_branch_name, + target_project_id, + target_branch, + gitlab_config["api_url"], + gitlab_config["api_token"]) + if mr_exists: + logger.info("Merge Request for {} {} to {} already exists - skipping".\ + format(repo_name, + unapproved_branch_name, + target_branch)) + return + + # If we got here, MR doesn't already exist - go through the rest of the steps. 
+ + # Fork this project to "unapproved" group + src_project_id = fork_project(repo_name, + target_project_id, + namespace_ids[group_names[0]], + gitlab_config["api_url"], + gitlab_config["api_token"]) + + logger.info("Forked to project {}/{}".\ + format(group_names[0],repo_name)) + # Get the remote URL for the unapproved project + remote_url = get_project_remote_url(repo_name, + namespace_ids[group_names[0]], + gitlab_config["api_url"], + gitlab_config["api_token"]) + + # Do the command-line git stuff to push to unapproved project + clone_commit_and_push(repo_name, + unzipped_location, + tmp_repo_dir, + unapproved_branch_name, + remote_url) + logger.info("Pushed to {}/{} branch {}".format(group_names[0],repo_name, unapproved_branch_name)) + + # Create the merge request + + create_merge_request(repo_name, + src_project_id, + unapproved_branch_name, + target_project_id, + branch_name, + gitlab_config["api_url"], + gitlab_config["api_token"]) + logger.info("Created merge request {} -> {}".\ + format(commit_hash, branch_name)) + + return True + + +def cleanup(zipfile_dir, tmp_unzipped_dir, tmp_repo_dir): + """ + Remove directories and files after everything has been uploaded to gitlab + + Parameters + ========== + zipfile_dir: str, directory containing the original zipfiles. Will not remove this + directory, but we will delete all the zipfiles in it. + tmp_unzipped_dir: str, directory where the unpacked zipfile contents are put. Remove. + tmp_repo_dir: str, directory where projects are cloned from Gitlab, then contents from + tmp_unzipped_dir are copied in. Remove. 
+ """ + logger.info(" === cleaning up ======") + shutil.rmtree(tmp_unzipped_dir) + logger.info("Removed directory {}".format(tmp_unzipped_dir)) + shutil.rmtree(tmp_repo_dir) + logger.info("Removed directory {}".format(tmp_repo_dir)) + for filename in os.listdir(zipfile_dir): + filepath = os.path.join(zipfile_dir, filename) + subprocess.run(["rm",filepath], check=True) + logger.info("Removed file {}".format(filepath)) + return True + + def main(): + ZIPFILE_DIR = "/zfiles" # create a directory to unpack the zipfiles into + TMP_UNZIPPED_DIR = "/tmp/unzipped" + os.makedirs(TMP_UNZIPPED_DIR, exist_ok=True) + # and a directory where we will clone projects, then copy file contents in TMP_REPO_DIR = "/tmp/repos" os.makedirs(TMP_REPO_DIR, exist_ok=True) # get the gitlab config config = get_gitlab_config() - ZIPFILE_DIR = "/zfiles" # unzip the zipfiles, and retrieve a list of tuples describing # (repo_name, commit_hash, desired_branch, unzipped_location) - unzipped_repos = unzip_zipfiles(ZIPFILE_DIR, TMP_REPO_DIR) + unzipped_repos = unzip_zipfiles(ZIPFILE_DIR, TMP_UNZIPPED_DIR) # get the namespace_ids of our "approval" and "unapproved" groups GROUPS = ["unapproved","approval"] @@ -616,50 +730,17 @@ def main(): GROUPS) # loop over all our newly unzipped repositories - for repo in unzipped_repos: - # unpack tuple - repo_name, commit_hash, branch_name, location = repo - print("Unpacked {} {} {}".format(repo_name, commit_hash, branch_name)) - unapproved_branch_name = "branch-{}".format(commit_hash) - src_project_id = create_and_push_unapproved_project(repo_name, - namespace_ids[GROUPS[0]], - config["api_url"], - config["api_token"], - location, - unapproved_branch_name) - print("Created project {}/{} branch {}".\ - format(GROUPS[0],repo_name, unapproved_branch_name)) - - # create project and branch on approved repo - target_project_id = create_approved_project_branch(repo_name, - branch_name, - namespace_ids[GROUPS[1]], - config["api_url"], - config["api_token"]) - 
print("Created project {}/{} branch {}".\ - format(GROUPS[1],repo_name, branch_name)) - - mr_exists = check_if_merge_request_exists(repo_name, - src_project_id, - commit_hash, - target_project_id, - branch_name, - config["api_url"], - config["api_token"]) - if mr_exists: - print("Merge request {} -> {} already exists. skipping".\ - format(commit_hash, branch_name)) - else: - # create merge request - create_merge_request(repo_name, - src_project_id, - unapproved_branch_name, - target_project_id, - branch_name, - config["api_url"], - config["api_token"]) - print("Created merge request {} -> {}".\ - format(commit_hash, branch_name)) + for repo_details in unzipped_repos: + # call function to go through all the project/branch/mr creation etc. + unzipped_repo_to_merge_request(repo_details, + TMP_REPO_DIR, + config, + namespace_ids, + GROUPS) + + # cleanup + cleanup(ZIPFILE_DIR, TMP_UNZIPPED_DIR, TMP_REPO_DIR) + if __name__ == "__main__": main() From f9b685a735802aa44b5ef675290a6b02d462de7c Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Thu, 14 May 2020 16:40:20 +0100 Subject: [PATCH 041/155] minor fixes after testing locally --- .../cloud_init/scripts/zipfile_to_gitlab_project.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 04d2f58056..8ba03164a2 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -100,9 +100,9 @@ def get_gitlab_config(): with open(f"{home}/.secrets/gitlab-external-ip-address", "r") as f: ip = f.readlines()[0].strip() + with open(f"{home}/.secrets/gitlab-external-api-token", "r") as f: token = f.readlines()[0].strip() - api_url = f"http://{ip}/api/v4/" headers = {"Authorization": "Bearer " + token} 
@@ -498,8 +498,9 @@ def clone_commit_and_push(repo_name, path_to_unzipped_repo, tmp_repo_dir, branch working_dir = os.path.join(tmp_repo_dir, repo_name) assert os.path.exists(working_dir) # Copy the unzipped repo contents into our cloned (empty) repo - subprocess.run("cp","-r",os.path.join(path_to_unzipped_repo,"*"),".", - cwd=working_dir, check=True) + for item in os.listdir(path_to_unzipped_repo): + subprocess.run(["cp","-r",os.path.join(path_to_unzipped_repo,item),"."], + cwd=working_dir, check=True) # Create a branch named after the original commit hash subprocess.run(["git","checkout","-b",branch_name], cwd=working_dir, check=True) @@ -509,7 +510,7 @@ def clone_commit_and_push(repo_name, path_to_unzipped_repo, tmp_repo_dir, branch subprocess.run(["git","commit","-m", commit_msg], cwd=working_dir, check=True) # Push back to gitlab external - subprocess.run(["git","push","--force","--all","--set-upstream","origin",branch_name], + subprocess.run(["git","push","--set-upstream","origin",branch_name], cwd=working_dir, check=True) @@ -634,7 +635,7 @@ def unzipped_repo_to_merge_request(repo_details, unapproved_branch_name = "branch-{}".format(commit_hash) mr_exists = check_if_merge_request_exists(unapproved_branch_name, target_project_id, - target_branch, + branch_name, gitlab_config["api_url"], gitlab_config["api_token"]) if mr_exists: @@ -709,6 +710,7 @@ def cleanup(zipfile_dir, tmp_unzipped_dir, tmp_repo_dir): def main(): + ZIPFILE_DIR = "/zfiles" # create a directory to unpack the zipfiles into TMP_UNZIPPED_DIR = "/tmp/unzipped" From c6c2ca70c8e885aac02c36f0a35398e2f26bd7f6 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Fri, 15 May 2020 18:15:54 +0100 Subject: [PATCH 042/155] Add missing quotation mark --- .../cloud_init/cloud-init-gitlab-external.template.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml 
b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index 97df17b1f3..6aaf646915 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -220,7 +220,7 @@ runcmd: - echo "*** Adding zipfile_to_gitlab_project.py to crontab ***" - echo "*/10 * * * * /home//zipfile_to_gitlab_project.py" >> /etc/crontab - echo "*** Adding check_merge_requests.py to crontab ***" - - echo 5,15,25,35,45,55 * * * * /home//check_merge_requests.py" >> /etc/crontab + - echo "5,15,25,35,45,55 * * * * /home//check_merge_requests.py" >> /etc/crontab # Shutdown so that we can tell when the job has finished by polling the VM state power_state: From 9efc44790b2e95551ae93f1bce8d946beb40c7ca Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Mon, 18 May 2020 10:27:29 +0100 Subject: [PATCH 043/155] Use the 'artifacts' resource group for the blob storage when uploading git repo --- .../administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 index ecb0f66c1e..3bb50a1478 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 @@ -58,7 +58,7 @@ $gitlabExternalVmName = $config.sre.webapps.gitlab.external.vmName # Go via blob storage - first create storage account if not already there $resourceGroupName = $config.sre.webapps.rg $sreStorageAccountName = $config.sre.storage.artifacts.accountName -$sreStorageAccount = Deploy-StorageAccount -Name $sreStorageAccountName -ResourceGroupName $resourceGroupName -Location $config.sre.location +$sreStorageAccount = Deploy-StorageAccount -Name $sreStorageAccountName -ResourceGroupName 
$config.sre.storage.artifacts.rg -Location $config.sre.location # Create container if not already there $containerName = $config.sre.storage.artifacts.containers.gitlabAirlockName From a15a7031bf9905f2416e9a3e3137b6b7a172ee68 Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Mon, 18 May 2020 10:29:55 +0100 Subject: [PATCH 044/155] protect against non-existing zipfile directory --- .../scripts/zipfile_to_gitlab_project.py | 21 ++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 8ba03164a2..92b4187b18 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -68,7 +68,11 @@ def unzip_zipfiles(zipfile_dir, tmp_unzipped_dir): shutil.rmtree(tmp_unzipped_dir, ignore_errors=True) os.makedirs(tmp_unzipped_dir) # look in a directory for zipfiles - zipfiles = os.listdir(zipfile_dir) + try: + zipfiles = os.listdir(zipfile_dir) + except(FileNotFoundError): + logger.info("Zipfile dir {} not found - assume nothing to unzip".format(zipfile_dir)) + return [] for zipfile in zipfiles: filename_match = repo_commit_regex.search(zipfile) if not filename_match: @@ -698,14 +702,17 @@ def cleanup(zipfile_dir, tmp_unzipped_dir, tmp_repo_dir): tmp_unzipped_dir are copied in. Remove. 
""" logger.info(" === cleaning up ======") - shutil.rmtree(tmp_unzipped_dir) + shutil.rmtree(tmp_unzipped_dir, ignore_errors=True) logger.info("Removed directory {}".format(tmp_unzipped_dir)) - shutil.rmtree(tmp_repo_dir) + shutil.rmtree(tmp_repo_dir, ignore_errors=True) logger.info("Removed directory {}".format(tmp_repo_dir)) - for filename in os.listdir(zipfile_dir): - filepath = os.path.join(zipfile_dir, filename) - subprocess.run(["rm",filepath], check=True) - logger.info("Removed file {}".format(filepath)) + try: + for filename in os.listdir(zipfile_dir): + filepath = os.path.join(zipfile_dir, filename) + subprocess.run(["rm",filepath], check=True) + logger.info("Removed file {}".format(filepath)) + except(FileNotFoundError): + logger.info("Zipfile directory {} not found - skipping".format(zipfile_dir)) return True From 3bc913c9c42e6c6b09c4e1e65f6b49e66817c523 Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Mon, 18 May 2020 10:47:02 +0100 Subject: [PATCH 045/155] test using webapps resource group for blob upload --- .../SRE_Upload_Git_Repo_to_GitlabExternal.ps1 | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 index 3bb50a1478..bdccc9e68e 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 @@ -57,8 +57,10 @@ Set-Location $workingDir $gitlabExternalVmName = $config.sre.webapps.gitlab.external.vmName # Go via blob storage - first create storage account if not already there $resourceGroupName = $config.sre.webapps.rg -$sreStorageAccountName = $config.sre.storage.artifacts.accountName -$sreStorageAccount = Deploy-StorageAccount -Name $sreStorageAccountName -ResourceGroupName $config.sre.storage.artifacts.rg -Location $config.sre.location +#$sreStorageAccountName = $config.sre.storage.artifacts.accountName 
+$sreStorageAccountName = "gitlabingresstest18052020" +#$sreStorageAccount = Deploy-StorageAccount -Name $sreStorageAccountName -ResourceGroupName $config.sre.storage.artifacts.rg -Location $config.sre.location +$sreStorageAccount = Deploy-StorageAccount -Name $sreStorageAccountName -ResourceGroupName $resourceGroupName -Location $config.sre.location # Create container if not already there $containerName = $config.sre.storage.artifacts.containers.gitlabAirlockName @@ -84,6 +86,7 @@ if ($numBlobs -gt 0) { # copy zipfile to blob storage # ---------------------------- Add-LogMessage -Level Info "Upload zipfile to storage..." + Set-AzStorageBlobContent -Container $containerName -Context $sreStorageAccount.Context -File $zipFilePath -Blob $zipFileName -Force # Download zipfile onto the remote machine From 29062642aa80f37195c363cc9107edd50756774a Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Mon, 18 May 2020 11:07:27 +0100 Subject: [PATCH 046/155] revert to using artifacts resource group for gitlab zip upload --- .../SRE_Upload_Git_Repo_to_GitlabExternal.ps1 | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 index bdccc9e68e..438b16ff1b 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 @@ -56,11 +56,9 @@ Set-Location $workingDir $gitlabExternalVmName = $config.sre.webapps.gitlab.external.vmName # Go via blob storage - first create storage account if not already there -$resourceGroupName = $config.sre.webapps.rg -#$sreStorageAccountName = $config.sre.storage.artifacts.accountName -$sreStorageAccountName = "gitlabingresstest18052020" -#$sreStorageAccount = Deploy-StorageAccount -Name $sreStorageAccountName -ResourceGroupName $config.sre.storage.artifacts.rg -Location $config.sre.location 
-$sreStorageAccount = Deploy-StorageAccount -Name $sreStorageAccountName -ResourceGroupName $resourceGroupName -Location $config.sre.location +$storageResourceGroupName = $config.sre.storage.artifacts.rg +$sreStorageAccountName = $config.sre.storage.artifacts.accountName +$sreStorageAccount = Deploy-StorageAccount -Name $sreStorageAccountName -ResourceGroupName $storageResourceGroupName -Location $config.sre.location # Create container if not already there $containerName = $config.sre.storage.artifacts.containers.gitlabAirlockName @@ -92,7 +90,7 @@ Set-AzStorageBlobContent -Container $containerName -Context $sreStorageAccount.C # Download zipfile onto the remote machine # ---------------------------------------- # Get a SAS token and construct URL -$sasToken = New-ReadOnlyAccountSasToken -ResourceGroup $resourceGroupName -AccountName $sreStorageAccount.StorageAccountName -SubscriptionName $config.sre.subscriptionName +$sasToken = New-ReadOnlyAccountSasToken -ResourceGroup $storageResourceGroupName -AccountName $sreStorageAccount.StorageAccountName -SubscriptionName $config.sre.subscriptionName $remoteUrl = "https://$($sreStorageAccount.StorageAccountName).blob.core.windows.net/${containerName}/${zipFileName}${sasToken}" Add-LogMessage -Level Info "Got SAS token and URL $remoteUrl" @@ -103,6 +101,7 @@ mkdir -p /zfiles curl -X GET -o /zfiles/${zipFileName} "${remoteUrl}" "@ +$resourceGroupName = $config.sre.webapps.rg Add-LogMessage -Level Info "[ ] Running remote script to download zipfile onto $gitlabExternalVmName" $result = Invoke-RemoteScript -Shell "UnixShell" -Script $script -VMName $gitlabExternalVmName -ResourceGroupName $resourceGroupName From 6182e045ce27c49d13f0a825e14f7e091993d539 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Mon, 18 May 2020 12:46:11 +0100 Subject: [PATCH 047/155] add path to create_gitlab_project to force correct case being preserved --- .../cloud_init/scripts/zipfile_to_gitlab_project.py | 1 + 1 file changed, 1 insertion(+) diff 
--git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 92b4187b18..054f3743d2 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -303,6 +303,7 @@ def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): response = requests.post(projects_url, headers = {"Authorization": "Bearer "+gitlab_token}, data = {"name": repo_name, + "path": repo_name, "visibility": "public", "namespace_id": namespace_id} ) From bdd6f658884d707d974cabb66f8c33ac2f0e11c9 Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Mon, 18 May 2020 13:31:55 +0100 Subject: [PATCH 048/155] create branch on approval project after fork to unapproved --- .../scripts/zipfile_to_gitlab_project.py | 104 +++++++----------- 1 file changed, 38 insertions(+), 66 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 92b4187b18..320cf592fb 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -9,7 +9,7 @@ 1) get useful gitlab stuff (url, api key, namespace_ids for our groups) 2) unzip zipfiles in specified directory 3) loop over unzipped repos. For each one: - a) see if "approval" project with same name exists, if not, create it, and branch "" + a) see if "approval" project with same name exists, if not, create it b) check if merge request to "approval/" with source and target branches "commit-" and "" already exists. If so, skip to the next unzipped repo. 
@@ -17,7 +17,8 @@ c) clone "unapproved" project, and create branch called "commit-" d) copy in contents of unzipped repo. e) git add, commit and push to "unapproved" project - f) create merge request from unapproved/repo_name/commit_hash to + f) create branch "" on "approval" project + g) create merge request from unapproved/repo_name/commit_hash to approval/repo_name/desired_branch_name 4) clean up - remove zipfiles and unpacked repos. """ @@ -563,48 +564,6 @@ def fork_project(repo_name, project_id, namespace_id, return new_project_id - -def create_project_and_branch(repo_name, - branch_name, - namespace_id, - gitlab_url, - gitlab_token): - - """ - Create a new branch (and a new project if it doesn't already exist) - owned by the "approval" group. This will be the target for the merge - request. - - Parameters - ========== - repo_name: str, repository name - gitlab_url: str, base URL for Gitlab API - gitlab_token: str, API token for Gitlab API - branch_name: str, the desired branch name. 
- - Returns - ======= - project_id: int, the "ID" field in the info from projects API endpoint - """ - # get the project ID - project will be created if it doesn't already exist - project_id = get_project_id(repo_name, namespace_id, gitlab_url, gitlab_token) - assert project_id - - # create the branch if it doesn't already exist - branch_exists = check_if_branch_exists(branch_name, - project_id, - gitlab_url, - gitlab_token) - if not branch_exists: - branch_info = create_branch(branch_name, - project_id, - gitlab_url, - gitlab_token) - assert branch_info["name"] == branch_name - # return the ID of this project so we can use it in merge request - return project_id - - def unzipped_repo_to_merge_request(repo_details, tmp_repo_dir, gitlab_config, @@ -624,29 +583,29 @@ def unzipped_repo_to_merge_request(repo_details, """ # unpack tuple - repo_name, commit_hash, branch_name, unzipped_location = repo_details - logger.info("Unpacked {} {} {}".format(repo_name, commit_hash, branch_name)) + repo_name, commit_hash, target_branch_name, unzipped_location = repo_details + logger.info("Unpacked {} {} {}".format(repo_name, commit_hash, target_branch_name)) # create project and branch on approved repo - target_project_id = create_project_and_branch(repo_name, - branch_name, - namespace_ids[group_names[1]], - gitlab_config["api_url"], - gitlab_config["api_token"]) - logger.info("Created project {}/{} branch {}".\ - format(group_names[1],repo_name, branch_name)) + target_project_info = create_project(repo_name, + namespace_ids[group_names[1]], + gitlab_config["api_url"], + gitlab_config["api_token"]) + target_project_id = target_project_info["id"] + logger.info("Created project {}/{} ".\ + format(group_names[1],repo_name)) # Check if we already have a Merge Request - if so we can just skip to the end - unapproved_branch_name = "branch-{}".format(commit_hash) - mr_exists = check_if_merge_request_exists(unapproved_branch_name, + src_branch_name = "branch-{}".format(commit_hash) + 
mr_exists = check_if_merge_request_exists(src_branch_name, target_project_id, - branch_name, + target_branch_name, gitlab_config["api_url"], gitlab_config["api_token"]) if mr_exists: logger.info("Merge Request for {} {} to {} already exists - skipping".\ format(repo_name, - unapproved_branch_name, - target_branch)) + src_branch_name, + target_branch_name)) return # If we got here, MR doesn't already exist - go through the rest of the steps. @@ -670,21 +629,34 @@ def unzipped_repo_to_merge_request(repo_details, clone_commit_and_push(repo_name, unzipped_location, tmp_repo_dir, - unapproved_branch_name, + src_branch_name, remote_url) - logger.info("Pushed to {}/{} branch {}".format(group_names[0],repo_name, unapproved_branch_name)) + logger.info("Pushed to {}/{} branch {}".format(group_names[0],repo_name, src_branch_name)) - # Create the merge request + # Create the branch on the "approval" project if it doesn't already exist + branch_exists = check_if_branch_exists(target_branch_name, + target_project_id, + gitlab_config["api_url"], + gitlab_config["api_token"]) + if not branch_exists: + branch_info = create_branch(target_branch_name, + target_project_id, + gitlab_config["api_url"], + gitlab_config["api_token"]) + assert branch_info["name"] == branch_name + + + # Create the merge request create_merge_request(repo_name, src_project_id, - unapproved_branch_name, + src_branch_name, target_project_id, - branch_name, + target_branch_name, gitlab_config["api_url"], gitlab_config["api_token"]) logger.info("Created merge request {} -> {}".\ - format(commit_hash, branch_name)) + format(commit_hash, target_branch_name)) return True @@ -709,7 +681,7 @@ def cleanup(zipfile_dir, tmp_unzipped_dir, tmp_repo_dir): try: for filename in os.listdir(zipfile_dir): filepath = os.path.join(zipfile_dir, filename) - subprocess.run(["rm",filepath], check=True) + subprocess.run(["rm","-f",filepath], check=True) logger.info("Removed file {}".format(filepath)) except(FileNotFoundError): 
logger.info("Zipfile directory {} not found - skipping".format(zipfile_dir)) @@ -718,7 +690,7 @@ def cleanup(zipfile_dir, tmp_unzipped_dir, tmp_repo_dir): def main(): - ZIPFILE_DIR = "/zfiles" + ZIPFILE_DIR = "/tmp/zipfiles" # create a directory to unpack the zipfiles into TMP_UNZIPPED_DIR = "/tmp/unzipped" os.makedirs(TMP_UNZIPPED_DIR, exist_ok=True) From d76ab25a338dd695601007aaf5a9485d3b443c7f Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Mon, 18 May 2020 14:28:22 +0100 Subject: [PATCH 049/155] fix path for downloading git repo zipfiles on gitlab-external --- .../administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 index 438b16ff1b..7fdbb571bc 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 @@ -97,8 +97,8 @@ Add-LogMessage -Level Info "Got SAS token and URL $remoteUrl" # Create remote script (make a directory /zfiles/ and run CURL to download blob to there) $script = @" #!/bin/bash -mkdir -p /zfiles -curl -X GET -o /zfiles/${zipFileName} "${remoteUrl}" +mkdir -p /tmp/zipfiles +curl -X GET -o /tmp/zipfiles/${zipFileName} "${remoteUrl}" "@ $resourceGroupName = $config.sre.webapps.rg From 0fd13561578c4d1a81c93419c133ffad83f33933 Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Mon, 18 May 2020 17:40:47 +0100 Subject: [PATCH 050/155] change ownership of zipfile dir on gitlab-external --- .../administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 index 7fdbb571bc..b73133c4a2 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 +++ 
b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 @@ -94,11 +94,15 @@ $sasToken = New-ReadOnlyAccountSasToken -ResourceGroup $storageResourceGroupName $remoteUrl = "https://$($sreStorageAccount.StorageAccountName).blob.core.windows.net/${containerName}/${zipFileName}${sasToken}" Add-LogMessage -Level Info "Got SAS token and URL $remoteUrl" +$sreAdminUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.Name -SecretName $config.sre.keyVault.secretNames.adminUsername -DefaultValue "sre$($config.sre.id)admin".ToLower() + # Create remote script (make a directory /zfiles/ and run CURL to download blob to there) $script = @" #!/bin/bash mkdir -p /tmp/zipfiles curl -X GET -o /tmp/zipfiles/${zipFileName} "${remoteUrl}" + +chown -R ${sreAdminUsername}:${sreAdminUsername} /tmp/zipfiles/ "@ $resourceGroupName = $config.sre.webapps.rg From e4a3304c5edec835ce503d2d76295bb76378cb1b Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Mon, 18 May 2020 17:41:28 +0100 Subject: [PATCH 051/155] check if project already exists before creating on gitlab-external --- .../scripts/zipfile_to_gitlab_project.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index b784a45283..ec845414ec 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -260,7 +260,7 @@ def get_project_remote_url(repo_name, namespace_id, def get_project_id(repo_name, namespace_id, - gitlab_url, gitlab_token): + gitlab_url, gitlab_token): """ Given the name of a repository and namespace_id (i.e. 
group, "unapproved" or "approval"), either return the id of project @@ -586,12 +586,10 @@ def unzipped_repo_to_merge_request(repo_details, # unpack tuple repo_name, commit_hash, target_branch_name, unzipped_location = repo_details logger.info("Unpacked {} {} {}".format(repo_name, commit_hash, target_branch_name)) - # create project and branch on approved repo - target_project_info = create_project(repo_name, - namespace_ids[group_names[1]], - gitlab_config["api_url"], - gitlab_config["api_token"]) - target_project_id = target_project_info["id"] + # create project on approved repo if not already there - this func will do that + target_project_id = get_project_id(repo_name, namespace_ids[group_names[1]], + gitlab_config["api_url"], + gitlab_config["api_token"]) logger.info("Created project {}/{} ".\ format(group_names[1],repo_name)) @@ -645,7 +643,7 @@ def unzipped_repo_to_merge_request(repo_details, target_project_id, gitlab_config["api_url"], gitlab_config["api_token"]) - assert branch_info["name"] == branch_name + assert branch_info["name"] == target_branch_name # Create the merge request From c639e472fedc5d57038133e1d1df248ec40549be Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Mon, 18 May 2020 17:42:47 +0100 Subject: [PATCH 052/155] simplifications to how disk is mounted, and fix to gitlab datadisk path --- .../cloud-init-gitlab-external.template.yaml | 18 +++++++++--------- .../cloud-init-gitlab-internal.template.yaml | 19 +++++++++---------- 2 files changed, 18 insertions(+), 19 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index 6aaf646915..eefd78cbfe 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -108,17 +108,17 @@ runcmd: - dpkg-reconfigure 
-f noninteractive tzdata # Set up the data disk - echo "Setting up data disk..." - - parted /dev/sdc mklabel gpt - - parted /dev/sdc mkpart primary ext4 0% 100% - - parted /dev/sdc print + - DEVICE=$(readlink -f /dev/disk/azure/scsi1/lun0) + - parted ${DEVICE} mklabel gpt + - parted ${DEVICE} mkpart primary ext4 0% 100% + - parted ${DEVICE} print - sleep 5 - - mkfs -t ext4 /dev/sdc1 + - mkfs -t ext4 ${DEVICE}1 - mkdir -p /datadrive - - mount /dev/sdc1 /datadrive - - UUID=$(blkid | grep "/dev/sdc1" | cut -d'"' -f2) - - sed "s|UUID|UUID=$UUID\t/datadrive\text4\tdefaults,nofail\t1\t2\nUUID|" /etc/fstab > fstab.tmp - - mv fstab.tmp /etc/fstab - - mkdir -p /datadrive/gitlab-data + - mount ${DEVICE}1 /datadrive + - UUID=$(blkid | grep "${DEVICE}1" | cut -d'"' -f2) + - echo "UUID=${UUID}\t/datadrive\text4\tdefaults,nofail\t1\t2" >> /etc/fstab + - mkdir -p /datadrive/gitdata # Enable custom GitLab settings and run an initial configuration - echo "Running initial configuration" - gitlab-ctl reconfigure diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml index 0081b29f58..9aec531eaa 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml @@ -60,17 +60,17 @@ runcmd: - dpkg-reconfigure -f noninteractive tzdata # Set up the data disk - echo "Setting up data disk..." 
- - parted /dev/sdc mklabel gpt - - parted /dev/sdc mkpart primary ext4 0% 100% - - parted /dev/sdc print + - DEVICE=$(readlink -f /dev/disk/azure/scsi1/lun0) + - parted $DEVICE mklabel gpt + - parted $DEVICE mkpart primary ext4 0% 100% + - parted $DEVICE print - sleep 5 - - mkfs -t ext4 /dev/sdc1 + - mkfs -t ext4 ${DEVICE}1 - mkdir -p /datadrive - - mount /dev/sdc1 /datadrive - - UUID=$(blkid | grep "/dev/sdc1" | cut -d'"' -f2) - - sed "s|UUID|UUID=$UUID\t/datadrive\text4\tdefaults,nofail\t1\t2\nUUID|" /etc/fstab > fstab.tmp - - mv fstab.tmp /etc/fstab - - mkdir -p /datadrive/gitlab-data + - mount ${DEVICE}1 /datadrive + - UUID=$(blkid | grep "${DEVICE}1" | cut -d'"' -f2) + - echo "UUID=${UUID}\t/datadrive\text4\tdefaults,nofail\t1\t2" >> /etc/fstab + - mkdir -p /datadrive/gitdata # Enable custom GitLab settings and run an initial configuration - echo "Running initial configuration" - gitlab-ctl reconfigure @@ -99,4 +99,3 @@ power_state: message: "Shutting down as a signal that setup is finished" timeout: 30 condition: True - From 5bd6752402c33c2f47d19c89fccf149350f45fdc Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Tue, 19 May 2020 11:59:51 +0100 Subject: [PATCH 053/155] Force path to match case of name in internal_update_repo --- .../cloud_init/scripts/check_merge_requests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py index 5971352464..6fa0b67b9d 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py +++ b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py @@ -118,7 +118,7 @@ def internal_update_repo(git_url, repo_name, config): response = requests.post( config["api_url"] + "projects", headers=config["headers"], - data={"name": repo_name, "visibility": "public"}, + data={"name": repo_name, "path": 
repo_name, "visibility": "public"}, ) response.raise_for_status() assert response.json()["path_with_namespace"] == "ingress/" + repo_name From d70ed6742dba16411d2e2c5ce426c00ae44fc189 Mon Sep 17 00:00:00 2001 From: nbarlowATI Date: Tue, 19 May 2020 12:32:39 +0100 Subject: [PATCH 054/155] import Security.psm1 to use Resolve-KeyvaultSecret function --- .../administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 | 1 + 1 file changed, 1 insertion(+) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 index b73133c4a2..7acc9d1d32 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 @@ -13,6 +13,7 @@ param( Import-Module Az Import-Module $PSScriptRoot/../common/Configuration.psm1 -Force +Import-Module $PSScriptRoot/../common/Security.psm1 -Force Import-Module $PSScriptRoot/../common/Logging.psm1 -Force Import-Module $PSScriptRoot/../common/Deployments.psm1 -Force Import-Module $PSScriptRoot/../common/GenerateSasToken.psm1 -Force From fdef7f2f4ecd14af9a9feb2f72359dbf302cb362 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Tue, 19 May 2020 17:16:56 +0100 Subject: [PATCH 055/155] Robustify zipfile_to_gitlab_project and check_merge_requests to cope with various issues --- .../scripts/check_merge_requests.py | 30 +- .../scripts/zipfile_to_gitlab_project.py | 484 ++++++++++-------- .../Deploy_RDS_Environment.template.ps1 | 2 +- 3 files changed, 306 insertions(+), 210 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py index 6fa0b67b9d..2fc52af7a7 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py +++ b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py @@ -88,7 
+88,7 @@ def internal_project_exists(repo_name, config): ) -def internal_update_repo(git_url, repo_name, config): +def internal_update_repo(git_url, repo_name, branch_name, config): """Takes a git URL, `git_url`, which should be the URL to the "APPROVED" repo on GITLAB-EXTERNAL, clones it and pushes all branches to the repo `repo_name` owned by 'ingress' on GITLAB-INTERNAL, creating it @@ -109,6 +109,7 @@ def internal_update_repo(git_url, repo_name, config): # same remote and pulling) subprocess.run(["rm", "-rf", repo_name], check=True) subprocess.run(["git", "clone", git_url, repo_name], check=True) + subprocess.run(["git", "checkout", branch_name], cwd=repo_name, check=True) project_exists, gl_internal_repo_url = internal_project_exists(repo_name, config) @@ -134,9 +135,7 @@ def internal_update_repo(git_url, repo_name, config): # Force push current contents of all branches subprocess.run( - ["git", "push", "--force", "--all", "gitlab-internal"], - cwd=repo_name, - check=True, + ["git", "push", "--force", "gitlab-internal"], cwd=repo_name, check=True ) @@ -308,7 +307,9 @@ def get_merge_requests_for_approval(config): group = get_group_id("approval", config) endpoint = config["api_url"] + f"/groups/{group}/merge_requests" response = get_request( - endpoint, headers=config["headers"], params={"state": "opened"} + endpoint, + headers=config["headers"], + params={"state": "opened", "scope": "created_by_me"}, ) return response @@ -388,6 +389,21 @@ def check_merge_requests(): logger.critical(f"Failed to load gitlab secrets: {e}") return + try: + internal_status = requests.get( + config_internal["api_url"] + "projects", + headers=config_internal["headers"], + timeout=5, + ) + if not internal_status.ok: + logger.critical( + f"Gitlab Internal Not Responding: {internal_status.status_code}, CONTENT {internal_status.content}" + ) + return + except Exception as e: + logger.critical(f"Gitlab Internal Not Responding: {e}") + return + logger.info("Getting open merge requests for 
approval") try: merge_requests = get_merge_requests_for_approval(config_external) @@ -406,7 +422,8 @@ def check_merge_requests(): logger.info(f"Source Branch: {mr['source_branch']}") target_project = get_project(mr["project_id"], config_external) logger.info(f"Target Project: {target_project['name_with_namespace']}") - logger.info(f"Target Branch: {mr['target_branch']}") + target_branch = mr["target_branch"] + logger.info(f"Target Branch: {target_branch}") logger.info(f"Commit SHA: {mr['sha']}") logger.info(f"Created At: {mr['created_at']}") status = mr["merge_status"] @@ -455,6 +472,7 @@ def check_merge_requests(): internal_update_repo( target_project["ssh_url_to_repo"], target_project["name"], + target_branch, config_internal, ) except Exception as e: diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index ec845414ec..7f647be747 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -39,7 +39,7 @@ logger.setLevel(logging.INFO) formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s") f_handler = RotatingFileHandler( -"upload_zipfiles_to_projects.log", maxBytes=5 * 1024 * 1024, backupCount=10 + "upload_zipfiles_to_projects.log", maxBytes=5 * 1024 * 1024, backupCount=10 ) f_handler.setFormatter(formatter) c_handler = logging.StreamHandler() @@ -71,8 +71,10 @@ def unzip_zipfiles(zipfile_dir, tmp_unzipped_dir): # look in a directory for zipfiles try: zipfiles = os.listdir(zipfile_dir) - except(FileNotFoundError): - logger.info("Zipfile dir {} not found - assume nothing to unzip".format(zipfile_dir)) + except (FileNotFoundError): + logger.info( + "Zipfile dir {} not found - assume nothing to unzip".format(zipfile_dir) + ) return [] for zipfile in zipfiles: filename_match = 
repo_commit_regex.search(zipfile) @@ -84,13 +86,13 @@ def unzip_zipfiles(zipfile_dir, tmp_unzipped_dir): # unzip try: zipfile_path = os.path.join(zipfile_dir, zipfile) - with ZipFile(zipfile_path, 'r') as zip_obj: + with ZipFile(zipfile_path, "r") as zip_obj: zip_obj.extractall(path=tmp_unzipped_dir) # we should have made a new directory - find its name unpacked_zips = os.listdir(tmp_unzipped_dir) unpacked_location = os.path.join(tmp_unzipped_dir, unpacked_zips[0]) output_list.append((repo_name, commit_hash, branch, unpacked_location)) - except(BadZipFile): + except (BadZipFile): logger.info("Bad zipfile: {}".format(zipfile)) continue return output_list @@ -111,14 +113,12 @@ def get_gitlab_config(): api_url = f"http://{ip}/api/v4/" headers = {"Authorization": "Bearer " + token} - return {"api_url": api_url, - "api_token": token, - "ip": ip, - "headers": headers} + return {"api_url": api_url, "api_token": token, "ip": ip, "headers": headers} -def get_group_namespace_ids(gitlab_url, gitlab_token, - groups=["approval","unapproved"]): +def get_group_namespace_ids( + gitlab_url, gitlab_token, groups=["approval", "unapproved"] +): """ Find the namespace_id corresponding to the groups we're interested in, e.g. 'approval' and 'unapproved'. 
@@ -135,11 +135,13 @@ def get_group_namespace_ids(gitlab_url, gitlab_token, """ namespaces_url = "{}/namespaces/".format(gitlab_url) - response = requests.get(namespaces_url, - headers = {"Authorization": "Bearer "+gitlab_token}) + response = requests.get( + namespaces_url, headers={"Authorization": "Bearer " + gitlab_token} + ) if response.status_code != 200: - raise RuntimeError("Bad request: {} {}"\ - .format(response.status_code, response.content)) + raise RuntimeError( + "Bad request: {} {}".format(response.status_code, response.content) + ) gitlab_namespaces = response.json() namespace_id_dict = {} for namespace in gitlab_namespaces: @@ -165,13 +167,16 @@ def get_gitlab_project_list(gitlab_url, gitlab_token): # list currently existing projects on Gitlab projects_url = "{}/projects/".format(gitlab_url) - response = requests.get(projects_url, - headers = {"Authorization": "Bearer "+gitlab_token}, - params = {"owned": True, "simple": True}) + response = requests.get( + projects_url, + headers={"Authorization": "Bearer " + gitlab_token}, + params={"owned": True, "simple": True}, + ) if response.status_code != 200: - raise RuntimeError("Bad request: {} {}"\ - .format(response.status_code, response.content)) + raise RuntimeError( + "Bad request: {} {}".format(response.status_code, response.content) + ) gitlab_projects = response.json() return gitlab_projects @@ -194,8 +199,7 @@ def check_if_project_exists(repo_name, namespace_id, gitlab_url, gitlab_token): """ projects = get_gitlab_project_list(gitlab_url, gitlab_token) for project in projects: - if project["name"] == repo_name and \ - project["namespace"]["id"] == namespace_id: + if project["name"] == repo_name and project["namespace"]["id"] == namespace_id: return True return False @@ -216,26 +220,23 @@ def get_project_info(repo_name, namespace_id, gitlab_url, gitlab_token): ======= project_info: dict, containing info from the projects API endpoint """ - already_exists = check_if_project_exists(repo_name, - 
namespace_id, - gitlab_url, - gitlab_token) + already_exists = check_if_project_exists( + repo_name, namespace_id, gitlab_url, gitlab_token + ) if already_exists: projects = get_gitlab_project_list(gitlab_url, gitlab_token) for project_info in projects: - if project_info["name"] == repo_name and \ - project_info["namespace"]["id"] == namespace_id: + if ( + project_info["name"] == repo_name + and project_info["namespace"]["id"] == namespace_id + ): return project_info else: - project_info = create_project(repo_name, - namespace_id, - gitlab_url, - gitlab_token) + project_info = create_project(repo_name, namespace_id, gitlab_url, gitlab_token) return project_info -def get_project_remote_url(repo_name, namespace_id, - gitlab_url, gitlab_token): +def get_project_remote_url(repo_name, namespace_id, gitlab_url, gitlab_token): """ Given the name of a repository and namespace_id (i.e. group, "unapproved" or "approval"), either return the remote URL for project @@ -253,14 +254,12 @@ def get_project_remote_url(repo_name, namespace_id, ======= gitlab_project_url: str, the URL to be set as the "remote". """ - project_info = get_project_info(repo_name, namespace_id, - gitlab_url, gitlab_token) + project_info = get_project_info(repo_name, namespace_id, gitlab_url, gitlab_token) return project_info["ssh_url_to_repo"] -def get_project_id(repo_name, namespace_id, - gitlab_url, gitlab_token): +def get_project_id(repo_name, namespace_id, gitlab_url, gitlab_token): """ Given the name of a repository and namespace_id (i.e. group, "unapproved" or "approval"), either return the id of project @@ -278,8 +277,7 @@ def get_project_id(repo_name, namespace_id, ======= gitlab_project_url: str, the URL to be set as the "remote". 
""" - project_info = get_project_info(repo_name, namespace_id, - gitlab_url, gitlab_token) + project_info = get_project_info(repo_name, namespace_id, gitlab_url, gitlab_token) return project_info["id"] @@ -301,24 +299,28 @@ def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): the remote URL for the project. """ projects_url = "{}projects/".format(gitlab_url) - response = requests.post(projects_url, - headers = {"Authorization": "Bearer "+gitlab_token}, - data = {"name": repo_name, - "path": repo_name, - "visibility": "public", - "namespace_id": namespace_id} + response = requests.post( + projects_url, + headers={"Authorization": "Bearer " + gitlab_token}, + data={ + "name": repo_name, + "path": repo_name, + "visibility": "public", + "namespace_id": namespace_id, + "initialize_with_readme": True, + }, ) - assert(response.json()["name"] == repo_name) + assert response.json()["name"] == repo_name project_info = response.json() - logger.info("Created project {} in namespace {}, project_id {}".\ - format(repo_name, namespace_id, project_info["id"])) + logger.info( + "Created project {} in namespace {}, project_id {}".format( + repo_name, namespace_id, project_info["id"] + ) + ) return project_info -def check_if_branch_exists(branch_name, - project_id, - gitlab_url, - gitlab_token): +def check_if_branch_exists(branch_name, project_id, gitlab_url, gitlab_token): """ See if a branch with name branch_name already exists on this Project @@ -333,13 +335,16 @@ def check_if_branch_exists(branch_name, ======= branch_exists: bool, True if branch exists, False if not. 
""" - branches_url = "{}/projects/{}/repository/branches".\ - format(gitlab_url, project_id) - response = requests.get(branches_url, - headers={"Authorization": "Bearer "+gitlab_token}) + branches_url = "{}/projects/{}/repository/branches".format(gitlab_url, project_id) + response = requests.get( + branches_url, headers={"Authorization": "Bearer " + gitlab_token} + ) if response.status_code != 200: - raise RuntimeError("Unable to check for branch {} on project {}: {}".\ - format(branch_name, project_id, r.content)) + raise RuntimeError( + "Unable to check for branch {} on project {}: {}".format( + branch_name, project_id, r.content + ) + ) branches = response.json() for branch_info in branches: if branch_info["name"] == branch_name: @@ -347,12 +352,9 @@ def check_if_branch_exists(branch_name, return False - -def create_branch(branch_name, - project_id, - gitlab_url, - gitlab_token, - reference_branch="master"): +def create_branch( + branch_name, project_id, gitlab_url, gitlab_token, reference_branch="master" +): """ Create a new branch on an existing project. By default, use 'master' as the reference branch from which to create the new one. @@ -372,21 +374,23 @@ def create_branch(branch_name, """ # assume branch doesn't already exist - create it! 
branch_url = "{}/projects/{}/repository/branches".format(gitlab_url, project_id) - response = requests.post(branch_url, - headers = {"Authorization": "Bearer "+gitlab_token}, - data = {"branch": branch_name, "ref": reference_branch}) + response = requests.post( + branch_url, + headers={"Authorization": "Bearer " + gitlab_token}, + data={"branch": branch_name, "ref": reference_branch}, + ) if response.status_code != 201: - raise RuntimeError("Problem creating branch {}: {}".format(branch_name, - response.content)) + raise RuntimeError( + "Problem creating branch {}: {}".format(branch_name, response.content) + ) branch_info = response.json() assert branch_info["name"] == branch_name return branch_info -def check_if_merge_request_exists(source_branch, - target_project_id, - target_branch, - gitlab_url, gitlab_token): +def check_if_merge_request_exists( + source_branch, target_project_id, target_branch, gitlab_url, gitlab_token +): """ See if there is an existing merge request between the source and target project/branch combinations. 
@@ -406,26 +410,36 @@ def check_if_merge_request_exists(source_branch, bool, True if merge request already exists, False otherwise """ mr_url = "{}/projects/{}/merge_requests".format(gitlab_url, target_project_id) - response = requests.get(mr_url, - headers = {"Authorization": "Bearer "+gitlab_token}) + response = requests.get(mr_url, headers={"Authorization": "Bearer " + gitlab_token}) if response.status_code != 200: - raise RuntimeError("Request to check existence of MR failed: {} {}".\ - format(response.status_code, response.content)) + raise RuntimeError( + "Request to check existence of MR failed: {} {}".format( + response.status_code, response.content + ) + ) for mr in response.json(): - if mr["source_branch"] == source_branch and \ - mr["target_branch"] == target_branch: - logger.info("Merge request {} -> {} already exists".\ - format(source_branch, target_branch)) + if ( + mr["source_branch"] == source_branch + and mr["target_branch"] == target_branch + ): + logger.info( + "Merge request {} -> {} already exists".format( + source_branch, target_branch + ) + ) return True return False -def create_merge_request(repo_name, - source_project_id, - source_branch, - target_project_id, - target_branch, - gitlab_url, gitlab_token): +def create_merge_request( + repo_name, + source_project_id, + source_branch, + target_project_id, + target_branch, + gitlab_url, + gitlab_token, +): """ Create a new MR, e.g. 
from the branch in the "unapproved" @@ -451,39 +465,51 @@ def create_merge_request(repo_name, mr_info: dict, the response from the API upon creating the Merge Request """ # first need to create a forked-from relationship between the projects - fork_url = "{}/projects/{}/fork/{}".format(gitlab_url, - source_project_id, - target_project_id) - response = requests.post(fork_url, - headers = {"Authorization": "Bearer "+gitlab_token}) + fork_url = "{}/projects/{}/fork/{}".format( + gitlab_url, source_project_id, target_project_id + ) + response = requests.post( + fork_url, headers={"Authorization": "Bearer " + gitlab_token} + ) # status code 201 if fork relationship created, or 409 if already there if (response.status_code != 201) and (response.status_code != 409): - raise RuntimeError("Unable to create fork relationship: {} {}".\ - format(response.status_code, response.content)) + raise RuntimeError( + "Unable to create fork relationship: {} {}".format( + response.status_code, response.content + ) + ) mr_url = "{}/projects/{}/merge_requests".format(gitlab_url, source_project_id) title = "{}: {} to {}".format(repo_name, source_branch, target_branch) - response = requests.post(mr_url, - headers = {"Authorization": "Bearer "+gitlab_token}, - data = {"source_branch": source_branch, - "target_branch": target_branch, - "target_project_id": target_project_id, - "title": title}) + response = requests.post( + mr_url, + headers={"Authorization": "Bearer " + gitlab_token}, + data={ + "source_branch": source_branch, + "target_branch": target_branch, + "target_project_id": target_project_id, + "title": title, + }, + ) if response.status_code != 201: -# raise RuntimeError("Problem creating Merge Request {} {} {}: {}"\ -# .format(repo_name, source_branch,target_branch, -# response.content)) -##### TEMPORARY - don't raise an error here - we get 500 status code -##### even though MR is created it - under investigation. 
- logger.info("Problem creating Merge Request {} {} {}: {}"\ - .format(repo_name, source_branch,target_branch, - response.content)) + # raise RuntimeError("Problem creating Merge Request {} {} {}: {}"\ + # .format(repo_name, source_branch,target_branch, + # response.content)) + ##### TEMPORARY - don't raise an error here - we get 500 status code + ##### even though MR is created it - under investigation. + logger.info( + "Problem creating Merge Request {} {} {}: {}".format( + repo_name, source_branch, target_branch, response.content + ) + ) return {} mr_info = response.json() return mr_info -def clone_commit_and_push(repo_name, path_to_unzipped_repo, tmp_repo_dir, branch_name, remote_url): +def clone_commit_and_push( + repo_name, path_to_unzipped_repo, tmp_repo_dir, branch_name, remote_url +): """ Run shell commands to convert the unzipped directory containing the repository contents into a git repo, then commit it to a branch named @@ -500,28 +526,31 @@ def clone_commit_and_push(repo_name, path_to_unzipped_repo, tmp_repo_dir, branch as a "remote". 
""" # Clone the repo - subprocess.run(["git","clone",remote_url],cwd=tmp_repo_dir, check=True) + subprocess.run(["git", "clone", remote_url], cwd=tmp_repo_dir, check=True) working_dir = os.path.join(tmp_repo_dir, repo_name) assert os.path.exists(working_dir) # Copy the unzipped repo contents into our cloned (empty) repo for item in os.listdir(path_to_unzipped_repo): - subprocess.run(["cp","-r",os.path.join(path_to_unzipped_repo,item),"."], - cwd=working_dir, check=True) + subprocess.run( + ["cp", "-r", os.path.join(path_to_unzipped_repo, item), "."], + cwd=working_dir, + check=True, + ) # Create a branch named after the original commit hash - subprocess.run(["git","checkout","-b",branch_name], - cwd=working_dir, check=True) + subprocess.run(["git", "checkout", "-b", branch_name], cwd=working_dir, check=True) # Commit everything to this branch, also putting commit hash into message - subprocess.run(["git","add","."], cwd=working_dir, check=True) + subprocess.run(["git", "add", "."], cwd=working_dir, check=True) commit_msg = "Committing to branch {}".format(branch_name) - subprocess.run(["git","commit","-m", commit_msg], - cwd=working_dir, check=True) + subprocess.run(["git", "commit", "-m", commit_msg], cwd=working_dir, check=True) # Push back to gitlab external - subprocess.run(["git","push","--set-upstream","origin",branch_name], - cwd=working_dir, check=True) + subprocess.run( + ["git", "push", "--set-upstream", "origin", branch_name], + cwd=working_dir, + check=True, + ) -def fork_project(repo_name, project_id, namespace_id, - gitlab_url, gitlab_token): +def fork_project(repo_name, project_id, namespace_id, gitlab_url, gitlab_token): """ Fork the project 'approval/' to 'unapproved/' after first checking whether the latter exists. 
@@ -538,38 +567,45 @@ def fork_project(repo_name, project_id, namespace_id, ======= new_project_id: int, the id of the newly created 'unapproved/' project """ - already_exists = check_if_project_exists(repo_name, namespace_id, gitlab_url, gitlab_token) + already_exists = check_if_project_exists( + repo_name, namespace_id, gitlab_url, gitlab_token + ) if not already_exists: fork_url = "{}/projects/{}/fork".format(gitlab_url, project_id) - response = requests.post(fork_url, - headers = {"Authorization": "Bearer "+gitlab_token}, - data = {"namespace_id": namespace_id}) + response = requests.post( + fork_url, + headers={"Authorization": "Bearer " + gitlab_token}, + data={"namespace_id": namespace_id}, + ) if response.status_code != 201: raise RuntimeError("Problem creating fork: {}".format(response.content)) new_project_id = response.json()["id"] return new_project_id else: # project already exists - ensure it is a fork of 'approval/' - new_project_id = get_project_id(repo_name, namespace_id, - gitlab_url, gitlab_token) - fork_url = "{}/projects/{}/fork/{}".format(gitlab_url, - new_project_id, - project_id) - response = requests.post(fork_url, - headers = {"Authorization": "Bearer "+gitlab_token}) + new_project_id = get_project_id( + repo_name, namespace_id, gitlab_url, gitlab_token + ) + fork_url = "{}/projects/{}/fork/{}".format( + gitlab_url, new_project_id, project_id + ) + response = requests.post( + fork_url, headers={"Authorization": "Bearer " + gitlab_token} + ) # status code 201 if fork relationship created, or 409 if already there if (response.status_code != 201) and (response.status_code != 409): - raise RuntimeError("Unable to create fork relationship: {} {}".\ - format(response.status_code, response.content)) + raise RuntimeError( + "Unable to create fork relationship: {} {}".format( + response.status_code, response.content + ) + ) return new_project_id -def unzipped_repo_to_merge_request(repo_details, - tmp_repo_dir, - gitlab_config, - namespace_ids, - 
group_names): +def unzipped_repo_to_merge_request( + repo_details, tmp_repo_dir, gitlab_config, namespace_ids, group_names +): """ Go through all the steps for a single repo/project. @@ -587,75 +623,115 @@ def unzipped_repo_to_merge_request(repo_details, repo_name, commit_hash, target_branch_name, unzipped_location = repo_details logger.info("Unpacked {} {} {}".format(repo_name, commit_hash, target_branch_name)) # create project on approved repo if not already there - this func will do that - target_project_id = get_project_id(repo_name, namespace_ids[group_names[1]], - gitlab_config["api_url"], - gitlab_config["api_token"]) - logger.info("Created project {}/{} ".\ - format(group_names[1],repo_name)) + target_project_id = get_project_id( + repo_name, + namespace_ids[group_names[1]], + gitlab_config["api_url"], + gitlab_config["api_token"], + ) + logger.info("Created project {}/{} ".format(group_names[1], repo_name)) # Check if we already have a Merge Request - if so we can just skip to the end src_branch_name = "branch-{}".format(commit_hash) - mr_exists = check_if_merge_request_exists(src_branch_name, - target_project_id, - target_branch_name, - gitlab_config["api_url"], - gitlab_config["api_token"]) + mr_exists = check_if_merge_request_exists( + src_branch_name, + target_project_id, + target_branch_name, + gitlab_config["api_url"], + gitlab_config["api_token"], + ) if mr_exists: - logger.info("Merge Request for {} {} to {} already exists - skipping".\ - format(repo_name, - src_branch_name, - target_branch_name)) + logger.info( + "Merge Request for {} {} to {} already exists - skipping".format( + repo_name, src_branch_name, target_branch_name + ) + ) return # If we got here, MR doesn't already exist - go through the rest of the steps. 
# Fork this project to "unapproved" group - src_project_id = fork_project(repo_name, - target_project_id, - namespace_ids[group_names[0]], - gitlab_config["api_url"], - gitlab_config["api_token"]) - - logger.info("Forked to project {}/{}".\ - format(group_names[0],repo_name)) + src_project_id = fork_project( + repo_name, + target_project_id, + namespace_ids[group_names[0]], + gitlab_config["api_url"], + gitlab_config["api_token"], + ) + + logger.info("Forked to project {}/{}".format(group_names[0], repo_name)) # Get the remote URL for the unapproved project - remote_url = get_project_remote_url(repo_name, - namespace_ids[group_names[0]], - gitlab_config["api_url"], - gitlab_config["api_token"]) + remote_url = get_project_remote_url( + repo_name, + namespace_ids[group_names[0]], + gitlab_config["api_url"], + gitlab_config["api_token"], + ) # Do the command-line git stuff to push to unapproved project - clone_commit_and_push(repo_name, - unzipped_location, - tmp_repo_dir, - src_branch_name, - remote_url) - logger.info("Pushed to {}/{} branch {}".format(group_names[0],repo_name, src_branch_name)) + branch_exists = check_if_branch_exists( + src_branch_name, + src_project_id, + gitlab_config["api_url"], + gitlab_config["api_token"], + ) + if not branch_exists: + clone_commit_and_push( + repo_name, unzipped_location, tmp_repo_dir, src_branch_name, remote_url + ) + logger.info( + "Pushed to {}/{} branch {}".format( + group_names[0], repo_name, src_branch_name + ) + ) + else: + logger.info( + "{}/{} branch {} already exists".format( + group_names[0], repo_name, src_branch_name + ) + ) # Create the branch on the "approval" project if it doesn't already exist - branch_exists = check_if_branch_exists(target_branch_name, - target_project_id, - gitlab_config["api_url"], - gitlab_config["api_token"]) + branch_exists = check_if_branch_exists( + target_branch_name, + target_project_id, + gitlab_config["api_url"], + gitlab_config["api_token"], + ) if not branch_exists: - 
branch_info = create_branch(target_branch_name, - target_project_id, - gitlab_config["api_url"], - gitlab_config["api_token"]) + branch_info = create_branch( + target_branch_name, + target_project_id, + gitlab_config["api_url"], + gitlab_config["api_token"], + ) assert branch_info["name"] == target_branch_name - + logger.info( + "{}/{} branch {} created".format( + group_names[1], repo_name, target_branch_name + ) + ) + else: + logger.info( + "{}/{} branch {} already exists".format( + group_names[1], repo_name, target_branch_name + ) + ) # Create the merge request - create_merge_request(repo_name, - src_project_id, - src_branch_name, - target_project_id, - target_branch_name, - gitlab_config["api_url"], - gitlab_config["api_token"]) - logger.info("Created merge request {} -> {}".\ - format(commit_hash, target_branch_name)) + create_merge_request( + repo_name, + src_project_id, + src_branch_name, + target_project_id, + target_branch_name, + gitlab_config["api_url"], + gitlab_config["api_token"], + ) + logger.info( + "Created merge request {} -> {}".format(commit_hash, target_branch_name) + ) return True @@ -680,9 +756,9 @@ def cleanup(zipfile_dir, tmp_unzipped_dir, tmp_repo_dir): try: for filename in os.listdir(zipfile_dir): filepath = os.path.join(zipfile_dir, filename) - subprocess.run(["rm","-f",filepath], check=True) + subprocess.run(["rm", "-f", filepath], check=True) logger.info("Removed file {}".format(filepath)) - except(FileNotFoundError): + except (FileNotFoundError): logger.info("Zipfile directory {} not found - skipping".format(zipfile_dir)) return True @@ -690,12 +766,16 @@ def cleanup(zipfile_dir, tmp_unzipped_dir, tmp_repo_dir): def main(): ZIPFILE_DIR = "/tmp/zipfiles" + os.makedirs(ZIPFILE_DIR, exist_ok=True) # create a directory to unpack the zipfiles into TMP_UNZIPPED_DIR = "/tmp/unzipped" - os.makedirs(TMP_UNZIPPED_DIR, exist_ok=True) + shutil.rmtree(TMP_UNZIPPED_DIR, ignore_errors=True) + os.makedirs(TMP_UNZIPPED_DIR) # and a directory where we 
will clone projects, then copy file contents in TMP_REPO_DIR = "/tmp/repos" - os.makedirs(TMP_REPO_DIR, exist_ok=True) + shutil.rmtree(TMP_REPO_DIR, ignore_errors=True) + os.makedirs(TMP_REPO_DIR) + # get the gitlab config config = get_gitlab_config() @@ -704,19 +784,17 @@ def main(): unzipped_repos = unzip_zipfiles(ZIPFILE_DIR, TMP_UNZIPPED_DIR) # get the namespace_ids of our "approval" and "unapproved" groups - GROUPS = ["unapproved","approval"] - namespace_ids = get_group_namespace_ids(config["api_url"], - config["api_token"], - GROUPS) + GROUPS = ["unapproved", "approval"] + namespace_ids = get_group_namespace_ids( + config["api_url"], config["api_token"], GROUPS + ) # loop over all our newly unzipped repositories for repo_details in unzipped_repos: # call function to go through all the project/branch/mr creation etc. - unzipped_repo_to_merge_request(repo_details, - TMP_REPO_DIR, - config, - namespace_ids, - GROUPS) + unzipped_repo_to_merge_request( + repo_details, TMP_REPO_DIR, config, namespace_ids, GROUPS + ) # cleanup cleanup(ZIPFILE_DIR, TMP_UNZIPPED_DIR, TMP_REPO_DIR) diff --git a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 index 6ddd1e7bef..4874a0077a 100644 --- a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 +++ b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 @@ -86,7 +86,7 @@ foreach($rdsConfiguration in @(("Applications", "", "" -Alias "chrome (1)" -DisplayName "Code Review" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151" -CollectionName "Review" -ErrorAction Stop + $_ = New-RDRemoteApp -ConnectionBroker "" -Alias "chrome (1)" -DisplayName "Code Review" -FilePath "C:\Program 
Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151/groups/approval/-/merge_requests" -CollectionName "Review" -ErrorAction Stop $_ = New-RDRemoteApp -ConnectionBroker "" -Alias "mstsc (1)" -DisplayName "DSVM Main (Desktop)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "-v .160" -CollectionName "Applications" -ErrorAction Stop $_ = New-RDRemoteApp -ConnectionBroker "" -Alias "mstsc (2)" -DisplayName "DSVM Other (Desktop)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CollectionName "Applications" -ErrorAction Stop $_ = New-RDRemoteApp -ConnectionBroker "" -Alias "chrome (2)" -DisplayName "GitLab" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151" -CollectionName "Applications" -ErrorAction Stop From aebd59af8a7a5a8e882a9a6746527d39a2cfe531 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Tue, 19 May 2020 17:51:57 +0100 Subject: [PATCH 056/155] ssh-keyscan localhost instead of external IP address --- .../cloud_init/cloud-init-gitlab-external.template.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index eefd78cbfe..9f110b67b8 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -197,7 +197,7 @@ runcmd: - | key=$(cat /home//.ssh/id_ed25519.pub); curl --header 'Authorization: Bearer ' --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"ExternalAPIUser\"}" /api/v4/user/keys; - ssh-keyscan -H >> /home//.ssh/known_hosts; + 
ssh-keyscan -H localhost >> /home//.ssh/known_hosts; chown : "/home//.ssh/known_hosts" # Create groups for storing unapproved and approval repos - | From 8525dbd7df80e71f3c3c2e96e25cf3d08088c4ea Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Wed, 20 May 2020 09:36:50 +0100 Subject: [PATCH 057/155] Get gitlab external ssh keys from /etc/ssh/ --- .../cloud_init/cloud-init-gitlab-external.template.yaml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index 9f110b67b8..b167bb652a 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -193,11 +193,15 @@ runcmd: chown : "/home//.secrets/gitlab-external-ip-address"; chown : "/home//.secrets/gitlab-external-username"; chown : "/home//.secrets/gitlab-external-user-email"; - # Create SSH key for gitlab external access, add gitlab external to known hosts + # Create SSH key for gitlab external access - | key=$(cat /home//.ssh/id_ed25519.pub); curl --header 'Authorization: Bearer ' --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"ExternalAPIUser\"}" /api/v4/user/keys; - ssh-keyscan -H localhost >> /home//.ssh/known_hosts; + # Get local ssh host keys, add them to known hosts under the gitlab external ip + - | + echo " $(cat /etc/ssh/ssh_host_rsa_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts + echo " $(cat /etc/ssh/ssh_host_ed25519_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts + echo " $(cat /etc/ssh/ssh_host_ecdsa_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts chown : "/home//.ssh/known_hosts" # Create groups for storing unapproved and approval repos - | From 9af6278b7dfd17069061384a204bf1db8efb8eee Mon Sep 17 
00:00:00 2001 From: Oliver Strickson Date: Wed, 20 May 2020 15:33:57 +0100 Subject: [PATCH 058/155] Change commit message when importing snapshot of requested repo --- .../cloud_init/scripts/zipfile_to_gitlab_project.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 7f647be747..0f3e9d263a 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -508,7 +508,7 @@ def create_merge_request( def clone_commit_and_push( - repo_name, path_to_unzipped_repo, tmp_repo_dir, branch_name, remote_url + repo_name, path_to_unzipped_repo, tmp_repo_dir, branch_name, remote_url, commit_hash ): """ Run shell commands to convert the unzipped directory containing the @@ -540,7 +540,7 @@ def clone_commit_and_push( subprocess.run(["git", "checkout", "-b", branch_name], cwd=working_dir, check=True) # Commit everything to this branch, also putting commit hash into message subprocess.run(["git", "add", "."], cwd=working_dir, check=True) - commit_msg = "Committing to branch {}".format(branch_name) + commit_msg = "Import snapshot of {} at commit {}".format(remote_url, commit_hash) subprocess.run(["git", "commit", "-m", commit_msg], cwd=working_dir, check=True) # Push back to gitlab external subprocess.run( @@ -678,7 +678,7 @@ def unzipped_repo_to_merge_request( ) if not branch_exists: clone_commit_and_push( - repo_name, unzipped_location, tmp_repo_dir, src_branch_name, remote_url + repo_name, unzipped_location, tmp_repo_dir, src_branch_name, remote_url, commit_hash ) logger.info( "Pushed to {}/{} branch {}".format( From 8c8b8c52ce5601d8eea7a02a5a643120ec972c57 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Wed, 20 May 2020 18:49:55 +0100 Subject: 
[PATCH 059/155] Use same branch name on source (unapproved) and target (approval) repos for the MR --- .../scripts/zipfile_to_gitlab_project.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 0f3e9d263a..e6409a36f9 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -510,18 +510,16 @@ def create_merge_request( def clone_commit_and_push( repo_name, path_to_unzipped_repo, tmp_repo_dir, branch_name, remote_url, commit_hash ): - """ - Run shell commands to convert the unzipped directory containing the - repository contents into a git repo, then commit it to a branch named - as the commit_hash. + """Run shell commands to convert the unzipped directory containing the + repository contents into a git repo, then commit it on the branch + with the requested name. Parameters ========== repo_name: str, name of the repository/project path_to_unzipped_repo: str, the full directory path to the unzipped repo tmp_repo_dir: str, path to a temporary dir where we will clone the project - branch_name: str, original commit hash from the external git repo, will - be used as the name of the branch to push to + branch_name: str, the name of the branch to push to remote_url: str, the URL for this project on gitlab-external to be added as a "remote". 
""" @@ -536,7 +534,7 @@ def clone_commit_and_push( cwd=working_dir, check=True, ) - # Create a branch named after the original commit hash + # Create the branch with the requested name subprocess.run(["git", "checkout", "-b", branch_name], cwd=working_dir, check=True) # Commit everything to this branch, also putting commit hash into message subprocess.run(["git", "add", "."], cwd=working_dir, check=True) @@ -631,8 +629,11 @@ def unzipped_repo_to_merge_request( ) logger.info("Created project {}/{} ".format(group_names[1], repo_name)) + # Branch to create on the source (unapproved) repository of the + # matches that of the target + src_branch_name = target_branch_name + # Check if we already have a Merge Request - if so we can just skip to the end - src_branch_name = "branch-{}".format(commit_hash) mr_exists = check_if_merge_request_exists( src_branch_name, target_project_id, From f4c0a9eb6f3e3e4fb41b5d32e64c1af073b2c586 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Thu, 21 May 2020 09:47:32 +0100 Subject: [PATCH 060/155] Change public groups and projects to internal --- .../cloud_init/cloud-init-gitlab-external.template.yaml | 4 ++-- .../cloud_init/scripts/zipfile_to_gitlab_project.py | 2 +- .../create_rds/templates/Deploy_RDS_Environment.template.ps1 | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index b167bb652a..e530dd9202 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -205,8 +205,8 @@ runcmd: chown : "/home//.ssh/known_hosts" # Create groups for storing unapproved and approval repos - | - curl --header "Authorization: Bearer " --data "name=approval&path=approval&visibility=public" /api/v4/groups; - 
curl --header "Authorization: Bearer " --data "name=unapproved&path=unapproved&visibility=public" /api/v4/groups + curl --header "Authorization: Bearer " --data "name=approval&path=approval&visibility=internal" /api/v4/groups; + curl --header "Authorization: Bearer " --data "name=unapproved&path=unapproved&visibility=internal" /api/v4/groups # -------------------------------- # FINAL SETUP # -------------------------------- diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index e6409a36f9..c27a17ec2a 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -305,7 +305,7 @@ def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): data={ "name": repo_name, "path": repo_name, - "visibility": "public", + "visibility": "internal", "namespace_id": namespace_id, "initialize_with_readme": True, }, diff --git a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 index 4874a0077a..6ddd1e7bef 100644 --- a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 +++ b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 @@ -86,7 +86,7 @@ foreach($rdsConfiguration in @(("Applications", "", "" -Alias "chrome (1)" -DisplayName "Code Review" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151/groups/approval/-/merge_requests" -CollectionName "Review" -ErrorAction Stop + $_ = New-RDRemoteApp -ConnectionBroker "" -Alias "chrome (1)" -DisplayName 
"Code Review" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151" -CollectionName "Review" -ErrorAction Stop $_ = New-RDRemoteApp -ConnectionBroker "" -Alias "mstsc (1)" -DisplayName "DSVM Main (Desktop)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "-v .160" -CollectionName "Applications" -ErrorAction Stop $_ = New-RDRemoteApp -ConnectionBroker "" -Alias "mstsc (2)" -DisplayName "DSVM Other (Desktop)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CollectionName "Applications" -ErrorAction Stop $_ = New-RDRemoteApp -ConnectionBroker "" -Alias "chrome (2)" -DisplayName "GitLab" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151" -CollectionName "Applications" -ErrorAction Stop From 162e611ec4efe5e6a2425305d66071ce359ef595 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 21 May 2020 10:23:54 +0100 Subject: [PATCH 061/155] Change default branch and commit a README file to 'approval' --- .../scripts/zipfile_to_gitlab_project.py | 71 +++++++++++++++++-- 1 file changed, 67 insertions(+), 4 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index c27a17ec2a..8e7b19894d 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -307,7 +307,7 @@ def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): "path": repo_name, "visibility": "internal", "namespace_id": namespace_id, - "initialize_with_readme": True, + "default_branch": "_gitlab_ingress_review" }, ) assert response.json()["name"] 
== repo_name @@ -317,6 +317,67 @@ def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): repo_name, namespace_id, project_info["id"] ) ) + # make the initial commit of README initialized with some instructions + README = f""" +# {repo_name} + +This is the root commit of the repository holding snapshots of the +reqested Git repository, at the commits that have been requested for +review. + +For guidance on the Safe Haven review process, see the Safe Haven +documentation, or contact .... + +## For Reviewers + +There is a merge request into this repository (`approval/{repo_name}`) +for each ingress request. + +Please look at each merge request in turn, and review it using the +usual GitLab review facilities to determine whether it can be brought +into the user-visible GitLab within the Safe Haven. + +- If you approve of making this snapshot available to the environment, + indicate your approval by leaving a "thumbs up" reaction to the top + comment of the Merge Request. +- Two such approvals are **required** before the merge request will be + **automatically merged** and brought into the user-visible GitLab in + the Research Environment. +- Any "unresolved threads" will prevent the merge so make sure that + all comment threads in the discussion have been marked as resolved. + +**Important**: Once the repository has had two approvals, the merge +will be made automatically. This could take up to 10 minutes. There +is no need (and you will not have the capability) to merge manually. + +## For Safe Haven Users + +The branches of this repository contain snapshots at the individual +commits that have been requested and approved by the Safe Haven Git +Ingress process. The commit history is not kept. For more on this +process, see the Safe Haven documentation. 
This commit will be the +root of each of these branches, and the contents of this file will be +overwritten (or removed) by the contents of the requested repository, +so if you are reading this, it is likely that you are browsing the +commit history. +""" + # Make the first commit to the project with the README + project_commit_url = f"{gitlab_url}/projects/{project_info['id']}/repository/commits" + response = requests.post( + project_commit_url, + headers={"Authorization": "Bearer " + gitlab_token}, + json={ + "branch": "_gitlab_ingress_review", + "commit_message": "Initial commit", + "actions": [ + { + "action": "create", + "file_path": "README.md", + "content": README + } + ] + } + ) return project_info @@ -353,11 +414,13 @@ def check_if_branch_exists(branch_name, project_id, gitlab_url, gitlab_token): def create_branch( - branch_name, project_id, gitlab_url, gitlab_token, reference_branch="master" + branch_name, project_id, gitlab_url, gitlab_token, + reference_branch="_gitlab_ingress_review" ): """ - Create a new branch on an existing project. By default, use 'master' - as the reference branch from which to create the new one. + Create a new branch on an existing project. By default, use + '_gitlab_ingress_review' (which is unlikely to exist in the source + repo) as the reference branch from which to create the new one. 
Parameters ========== From dfd1dc9c1d9e4a5e169aceb1ac89f7d62d5abdbb Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 21 May 2020 18:42:30 +0100 Subject: [PATCH 062/155] Move docstrings into a separate file, to fit within the character limit for provisioning --- .../scripts/zipfile_to_gitlab_project.py | 279 ------------------ .../scripts/zipfile_to_gitlab_project_doc.py | 274 +++++++++++++++++ 2 files changed, 274 insertions(+), 279 deletions(-) create mode 100644 deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project_doc.py diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 8e7b19894d..f68533979f 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -1,29 +1,4 @@ #!/usr/bin/env python3 - -""" -Start from zipfile of a particular commit - should have filename -of the form __.zip - -We want to turn this into a merge request on a Gitlab project. - -1) get useful gitlab stuff (url, api key, namespace_ids for our groups) -2) unzip zipfiles in specified directory -3) loop over unzipped repos. For each one: - a) see if "approval" project with same name exists, if not, create it - b) check if merge request to "approval/" with source and target branches - "commit-" and "" already exists. - If so, skip to the next unzipped repo. - b) see if "unapproved" project with same name exists, if not, fork "approval" one - c) clone "unapproved" project, and create branch called "commit-" - d) copy in contents of unzipped repo. 
- e) git add, commit and push to "unapproved" project - f) create branch "" on "approval" project - g) create merge request from unapproved/repo_name/commit_hash to - approval/repo_name/desired_branch_name -4) clean up - remove zipfiles and unpacked repos. -""" - - import os import shutil import re @@ -49,20 +24,6 @@ def unzip_zipfiles(zipfile_dir, tmp_unzipped_dir): - """ - Parameters - ========== - zipfile_dir: str, path to directory containing zipfiles - tmp_unzipped_dir: str, path to directory where zipfiles will be unzipped - - Returns - ======= - output_list: list of tuples - [(repo_name, commit_hash, desired_branch, unzipped-path),...] - - Note that the convention for the zipfile filenames is - __.zip - """ output_list = [] repo_commit_regex = re.compile("([-\w]+)_([a-f\d]+)_([\S]+).zip") # tear down and recreate the directory where we will put the unpacked zip @@ -99,10 +60,6 @@ def unzip_zipfiles(zipfile_dir, tmp_unzipped_dir): def get_gitlab_config(): - """ - Return a dictionary containing the base URL for the gitlab API, - the API token, the IP address, and the headers to go in any request - """ home = str(Path.home()) with open(f"{home}/.secrets/gitlab-external-ip-address", "r") as f: @@ -119,21 +76,6 @@ def get_gitlab_config(): def get_group_namespace_ids( gitlab_url, gitlab_token, groups=["approval", "unapproved"] ): - """ - Find the namespace_id corresponding to the groups we're interested in, - e.g. 'approval' and 'unapproved'. - - Parameters - ========== - gitlab_url: str, base URL for the API - gitlab_token: str, API token for Gitlab - groups: list of string, the group names to look for. - - Returns - ======= - namespace_id_dict: dict, format {: } - - """ namespaces_url = "{}/namespaces/".format(gitlab_url) response = requests.get( namespaces_url, headers={"Authorization": "Bearer " + gitlab_token} @@ -151,20 +93,6 @@ def get_group_namespace_ids( def get_gitlab_project_list(gitlab_url, gitlab_token): - """ - Get the list of Projects. 
- - Parameters - ========== - namespace_id: int, ID of the group ("unapproved" or "approval") - gitlab_url: str, base URL for the API - gitlab_token: str, API token. - - Returns - ======= - gitlab_projects: list of dictionaries. - """ - # list currently existing projects on Gitlab projects_url = "{}/projects/".format(gitlab_url) response = requests.get( @@ -182,21 +110,6 @@ def get_gitlab_project_list(gitlab_url, gitlab_token): def check_if_project_exists(repo_name, namespace_id, gitlab_url, gitlab_token): - """ - Get a list of projects from the API - check if namespace_id (i.e. group) - and name match. - - Parameters - ========== - repo_name: str, name of our repository/project - namespace_id: int, id of our group ("unapproved" or "approval") - gitlab_url: str, base URL of Gitlab API - gitlab_token: str, API key for Gitlab API. - - Returns - ======= - bool, True if project exists, False otherwise. - """ projects = get_gitlab_project_list(gitlab_url, gitlab_token) for project in projects: if project["name"] == repo_name and project["namespace"]["id"] == namespace_id: @@ -205,21 +118,6 @@ def check_if_project_exists(repo_name, namespace_id, gitlab_url, gitlab_token): def get_project_info(repo_name, namespace_id, gitlab_url, gitlab_token): - """ - Check if project exists, and if so get its ID. Otherwise, create - it and return the ID. - - Parameters - ========== - repo_name: str, name of our repository/project - namespace_id: int, id of our group ("unapproved" or "approval") - gitlab_url: str, base URL of Gitlab API - gitlab_token: str, API key for Gitlab API. 
- - Returns - ======= - project_info: dict, containing info from the projects API endpoint - """ already_exists = check_if_project_exists( repo_name, namespace_id, gitlab_url, gitlab_token ) @@ -237,67 +135,18 @@ def get_project_info(repo_name, namespace_id, gitlab_url, gitlab_token): def get_project_remote_url(repo_name, namespace_id, gitlab_url, gitlab_token): - """ - Given the name of a repository and namespace_id (i.e. group, - "unapproved" or "approval"), either return the remote URL for project - matching the repo name, or create it if it doesn't exist already, - and again return the remote URL. - - Parameters - ========== - repo_name: str, name of the repository/project we're looking for. - namespace_id: int, the ID of the group ("unapproved" or "approval") - gitlab_url: str, base URL of the API - gitlab_token: str, API key - - Returns - ======= - gitlab_project_url: str, the URL to be set as the "remote". - """ project_info = get_project_info(repo_name, namespace_id, gitlab_url, gitlab_token) return project_info["ssh_url_to_repo"] def get_project_id(repo_name, namespace_id, gitlab_url, gitlab_token): - """ - Given the name of a repository and namespace_id (i.e. group, - "unapproved" or "approval"), either return the id of project - matching the repo name, or create it if it doesn't exist already, - and again return the id. - - Parameters - ========== - repo_name: str, name of the repository/project we're looking for. - namespace_id: int, the ID of the group ("unapproved" or "approval") - gitlab_url: str, base URL of the API - gitlab_token: str, API key - - Returns - ======= - gitlab_project_url: str, the URL to be set as the "remote". - """ project_info = get_project_info(repo_name, namespace_id, gitlab_url, gitlab_token) return project_info["id"] def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): - """ - Create empty project on gitlab, and return the corresponding remote URL. 
- - Parameters - ========== - repo_name: str, name of the repository/project - namespace_id: int, ID of the group ("unapproved" or "approved") - gitlab_url: str, base URL of the API - gitlab_token: str, API token. - - Returns - ======= - gitlab_project_info: dict, containing among other things, the name and - the remote URL for the project. - """ projects_url = "{}projects/".format(gitlab_url) response = requests.post( projects_url, @@ -382,20 +231,6 @@ def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): def check_if_branch_exists(branch_name, project_id, gitlab_url, gitlab_token): - """ - See if a branch with name branch_name already exists on this Project - - Parameters - ========== - branch_name: str, name of branch to look for - project_id: int, id of the project, obtained from projects API endpoint - gitlab_url: base URL of the Gitlab API - gitlab_token: API token for the Gitlab API - - Returns - ======= - branch_exists: bool, True if branch exists, False if not. - """ branches_url = "{}/projects/{}/repository/branches".format(gitlab_url, project_id) response = requests.get( branches_url, headers={"Authorization": "Bearer " + gitlab_token} @@ -417,24 +252,6 @@ def create_branch( branch_name, project_id, gitlab_url, gitlab_token, reference_branch="_gitlab_ingress_review" ): - """ - Create a new branch on an existing project. By default, use - '_gitlab_ingress_review' (which is unlikely to exist in the source - repo) as the reference branch from which to create the new one. - - Parameters - ========== - branch_name: str, the desired name of the new branch - project_id: int, the ID of the project, which is the "id" value in - the dictionary of project information returned when - creating a new project or listing existing ones. 
- gitlab_url: str, the base URL for the Gitlab API - gitlab_token: str, the Gitlab API token - - Returns - ======= - branch_info: dict, info about the branch from API endpoint - """ # assume branch doesn't already exist - create it! branch_url = "{}/projects/{}/repository/branches".format(gitlab_url, project_id) response = requests.post( @@ -454,24 +271,6 @@ def create_branch( def check_if_merge_request_exists( source_branch, target_project_id, target_branch, gitlab_url, gitlab_token ): - """ - See if there is an existing merge request between the source and target - project/branch combinations. - - Parameters - ========== - source_branch: str, name of the branch on source project, will typically - be the commit_hash from the original repo. - target_project_id: int, project_id for the "approval" group's project. - target_branch: str, name of branch on target project, will typically - be the desired branch name. - gitlab_url: str, base URL for the Gitlab API - gitlab_token: str, API token for the Gitlab API. - - Returns - ======= - bool, True if merge request already exists, False otherwise - """ mr_url = "{}/projects/{}/merge_requests".format(gitlab_url, target_project_id) response = requests.get(mr_url, headers={"Authorization": "Bearer " + gitlab_token}) if response.status_code != 200: @@ -503,30 +302,6 @@ def create_merge_request( gitlab_url, gitlab_token, ): - - """ - Create a new MR, e.g. from the branch in the "unapproved" - group's project, to the branch in the "approval" - group's project. - - Parameters - ========== - repo_name: str, name of the repository - source_project_id: int, project_id for the unapproved project, obtainable - as the "ID" field of the json returned from the - projects API endpoint. - source_branch: str, name of the branch on source project, will typically - be the 'branch-'. - target_project_id: int, project_id for the "approval" group's project. 
- target_branch: str, name of branch on target project, will typically - be the desired branch name. - gitlab_url: str, base URL for the Gitlab API - gitlab_token: str, API token for the Gitlab API. - - Returns - ======= - mr_info: dict, the response from the API upon creating the Merge Request - """ # first need to create a forked-from relationship between the projects fork_url = "{}/projects/{}/fork/{}".format( gitlab_url, source_project_id, target_project_id @@ -573,19 +348,6 @@ def create_merge_request( def clone_commit_and_push( repo_name, path_to_unzipped_repo, tmp_repo_dir, branch_name, remote_url, commit_hash ): - """Run shell commands to convert the unzipped directory containing the - repository contents into a git repo, then commit it on the branch - with the requested name. - - Parameters - ========== - repo_name: str, name of the repository/project - path_to_unzipped_repo: str, the full directory path to the unzipped repo - tmp_repo_dir: str, path to a temporary dir where we will clone the project - branch_name: str, the name of the branch to push to - remote_url: str, the URL for this project on gitlab-external to be added - as a "remote". - """ # Clone the repo subprocess.run(["git", "clone", remote_url], cwd=tmp_repo_dir, check=True) working_dir = os.path.join(tmp_repo_dir, repo_name) @@ -612,22 +374,6 @@ def clone_commit_and_push( def fork_project(repo_name, project_id, namespace_id, gitlab_url, gitlab_token): - """ - Fork the project 'approval/' to 'unapproved/' - after first checking whether the latter exists. 
- - Parameters - ========== - repo_name: str, name of the repo/project - project_id: int, project id of the 'approval/' project - namespace_id: int, id of the 'unapproved' namespace - gitlab_url: str, str, the base URL of Gitlab API - gitlab_token: str, API token for Gitlab API - - Returns - ======= - new_project_id: int, the id of the newly created 'unapproved/' project - """ already_exists = check_if_project_exists( repo_name, namespace_id, gitlab_url, gitlab_token ) @@ -667,19 +413,6 @@ def fork_project(repo_name, project_id, namespace_id, gitlab_url, gitlab_token): def unzipped_repo_to_merge_request( repo_details, tmp_repo_dir, gitlab_config, namespace_ids, group_names ): - """ - Go through all the steps for a single repo/project. - - Parameters - ========== - repo_details: tuple of strings, (repo_name, hash, desired_branch, location) - tmp_repo_dir: str, directory where we will clone the repo, then copy the contents in - gitlab_config: dict, contains api url and token - namespace_ids; dict, keys are the group names (e.g. "unapproved", "approval", values - are the ids of the corresponding namespaces in Gitlab - group_names: list of strings, typically ["unapproved", "approval"] - """ - # unpack tuple repo_name, commit_hash, target_branch_name, unzipped_location = repo_details logger.info("Unpacked {} {} {}".format(repo_name, commit_hash, target_branch_name)) @@ -801,17 +534,6 @@ def unzipped_repo_to_merge_request( def cleanup(zipfile_dir, tmp_unzipped_dir, tmp_repo_dir): - """ - Remove directories and files after everything has been uploaded to gitlab - - Parameters - ========== - zipfile_dir: str, directory containing the original zipfiles. Will not remove this - directory, but we will delete all the zipfiles in it. - tmp_unzipped_dir: str, directory where the unpacked zipfile contents are put. Remove. - tmp_repo_dir: str, directory where projects are cloned from Gitlab, then contents from - tmp_unzipped_dir are copied in. Remove. 
- """ logger.info(" === cleaning up ======") shutil.rmtree(tmp_unzipped_dir, ignore_errors=True) logger.info("Removed directory {}".format(tmp_unzipped_dir)) @@ -828,7 +550,6 @@ def cleanup(zipfile_dir, tmp_unzipped_dir, tmp_repo_dir): def main(): - ZIPFILE_DIR = "/tmp/zipfiles" os.makedirs(ZIPFILE_DIR, exist_ok=True) # create a directory to unpack the zipfiles into diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project_doc.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project_doc.py new file mode 100644 index 0000000000..48a103c987 --- /dev/null +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project_doc.py @@ -0,0 +1,274 @@ +from zipfile_to_gitlab_project import * + +__doc__ = """ +Start from zipfile of a particular commit - should have filename +of the form __.zip + +We want to turn this into a merge request on a Gitlab project. + +1) get useful gitlab stuff (url, api key, namespace_ids for our groups) +2) unzip zipfiles in specified directory +3) loop over unzipped repos. For each one: + a) see if "approval" project with same name exists, if not, create it + b) check if merge request to "approval/" with source and target branches + "commit-" and "" already exists. + If so, skip to the next unzipped repo. + b) see if "unapproved" project with same name exists, if not, fork "approval" one + c) clone "unapproved" project, and create branch called "commit-" + d) copy in contents of unzipped repo. + e) git add, commit and push to "unapproved" project + f) create branch "" on "approval" project + g) create merge request from unapproved/repo_name/commit_hash to + approval/repo_name/desired_branch_name +4) clean up - remove zipfiles and unpacked repos. 
+""" + +unzip_zipfiles.__doc__ = """ +Parameters +========== +zipfile_dir: str, path to directory containing zipfiles +tmp_unzipped_dir: str, path to directory where zipfiles will be unzipped + +Returns +======= +output_list: list of tuples +[(repo_name, commit_hash, desired_branch, unzipped-path),...] + +Note that the convention for the zipfile filenames is +__.zip +""" + +get_gitlab_config.__doc__ = """ +Return a dictionary containing the base URL for the gitlab API, +the API token, the IP address, and the headers to go in any request +""" + +get_group_namespace_ids.__doc__ = """ +Find the namespace_id corresponding to the groups we're interested in, +e.g. 'approval' and 'unapproved'. + +Parameters +========== +gitlab_url: str, base URL for the API +gitlab_token: str, API token for Gitlab +groups: list of string, the group names to look for. + +Returns +======= +namespace_id_dict: dict, format {: } +""" + +get_gilab_project_list.__doc__ = """ +Get the list of Projects. + +Parameters +========== +namespace_id: int, ID of the group ("unapproved" or "approval") +gitlab_url: str, base URL for the API +gitlab_token: str, API token. + +Returns +======= +gitlab_projects: list of dictionaries. +""" + +check_if_project_exists.__doc__ = """ +Get a list of projects from the API - check if namespace_id (i.e. group) +and name match. + +Parameters +========== +repo_name: str, name of our repository/project +namespace_id: int, id of our group ("unapproved" or "approval") +gitlab_url: str, base URL of Gitlab API +gitlab_token: str, API key for Gitlab API. + +Returns +======= +bool, True if project exists, False otherwise. +""" + +get_project_info.__doc__ = """ +Check if project exists, and if so get its ID. Otherwise, create +it and return the ID. + +Parameters +========== +repo_name: str, name of our repository/project +namespace_id: int, id of our group ("unapproved" or "approval") +gitlab_url: str, base URL of Gitlab API +gitlab_token: str, API key for Gitlab API. 
+ +Returns +======= +project_info: dict, containing info from the projects API endpoint +""" + +get_project_id.__doc__ = """ +Given the name of a repository and namespace_id (i.e. group, +"unapproved" or "approval"), either return the remote URL for project +matching the repo name, or create it if it doesn't exist already, +and again return the remote URL. + +Parameters +========== +repo_name: str, name of the repository/project we're looking for. +namespace_id: int, the ID of the group ("unapproved" or "approval") +gitlab_url: str, base URL of the API +gitlab_token: str, API key + +Returns +======= +gitlab_project_url: str, the URL to be set as the "remote". +""" + +create_project.__doc__ = """ +Create empty project on gitlab, and return the corresponding remote URL. + +Parameters +========== +repo_name: str, name of the repository/project +namespace_id: int, ID of the group ("unapproved" or "approved") +gitlab_url: str, base URL of the API +gitlab_token: str, API token. + +Returns +======= +gitlab_project_info: dict, containing among other things, the name and +the remote URL for the project. +""" + +check_if_branch_exists.__doc__ = """ +See if a branch with name branch_name already exists on this Project + +Parameters +========== +branch_name: str, name of branch to look for +project_id: int, id of the project, obtained from projects API endpoint +gitlab_url: base URL of the Gitlab API +gitlab_token: API token for the Gitlab API + +Returns +======= +branch_exists: bool, True if branch exists, False if not. +""" + +create_branch.__doc__ = """ +Create a new branch on an existing project. By default, use +'_gitlab_ingress_review' (which is unlikely to exist in the source +repo) as the reference branch from which to create the new one. 
+ +Parameters +========== +branch_name: str, the desired name of the new branch +project_id: int, the ID of the project, which is the "id" value in +the dictionary of project information returned when +creating a new project or listing existing ones. +gitlab_url: str, the base URL for the Gitlab API +gitlab_token: str, the Gitlab API token + +Returns +======= +branch_info: dict, info about the branch from API endpoint +""" + +check_if_merge_request_exists.__doc__ = """ +See if there is an existing merge request between the source and target +project/branch combinations. + +Parameters +========== +source_branch: str, name of the branch on source project, will typically +be the commit_hash from the original repo. +target_project_id: int, project_id for the "approval" group's project. +target_branch: str, name of branch on target project, will typically +be the desired branch name. +gitlab_url: str, base URL for the Gitlab API +gitlab_token: str, API token for the Gitlab API. + +Returns +======= +bool, True if merge request already exists, False otherwise +""" + +create_merge_request.__doc__ = """ +Create a new MR, e.g. from the branch in the "unapproved" +group's project, to the branch in the "approval" +group's project. + +Parameters +========== +repo_name: str, name of the repository +source_project_id: int, project_id for the unapproved project, obtainable +as the "ID" field of the json returned from the +projects API endpoint. +source_branch: str, name of the branch on source project, will typically +be the 'branch-'. +target_project_id: int, project_id for the "approval" group's project. +target_branch: str, name of branch on target project, will typically +be the desired branch name. +gitlab_url: str, base URL for the Gitlab API +gitlab_token: str, API token for the Gitlab API. 
+ +Returns +======= +mr_info: dict, the response from the API upon creating the Merge Request +""" + + +clone_commit_and_push.__doc__ = """ +Run shell commands to convert the unzipped directory containing the +repository contents into a git repo, then commit it on the branch +with the requested name. + +Parameters +========== +repo_name: str, name of the repository/project +path_to_unzipped_repo: str, the full directory path to the unzipped repo +tmp_repo_dir: str, path to a temporary dir where we will clone the project +branch_name: str, the name of the branch to push to +remote_url: str, the URL for this project on gitlab-external to be added +as a "remote". +""" + +fork_project.__doc__ = """ +Fork the project 'approval/' to 'unapproved/' +after first checking whether the latter exists. + +Parameters +========== +repo_name: str, name of the repo/project +project_id: int, project id of the 'approval/' project +namespace_id: int, id of the 'unapproved' namespace +gitlab_url: str, str, the base URL of Gitlab API +gitlab_token: str, API token for Gitlab API + +Returns +======= +new_project_id: int, the id of the newly created 'unapproved/' project +""" + +unzipped_repo_to_merge_request = """ +Go through all the steps for a single repo/project. + +Parameters +========== +repo_details: tuple of strings, (repo_name, hash, desired_branch, location) +tmp_repo_dir: str, directory where we will clone the repo, then copy the contents in +gitlab_config: dict, contains api url and token +namespace_ids; dict, keys are the group names (e.g. "unapproved", "approval", values +are the ids of the corresponding namespaces in Gitlab +group_names: list of strings, typically ["unapproved", "approval"] +""" + +cleanup.__doc__ = """ +Remove directories and files after everything has been uploaded to gitlab + +Parameters +========== +zipfile_dir: str, directory containing the original zipfiles. Will not remove this +directory, but we will delete all the zipfiles in it. 
+tmp_unzipped_dir: str, directory where the unpacked zipfile contents are put. Remove. +tmp_repo_dir: str, directory where projects are cloned from Gitlab, then contents from +tmp_unzipped_dir are copied in. Remove. +""" From 837b2da01ee4eb611b1e8eb4a8eccfef9304db3a Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Fri, 29 May 2020 11:08:10 +0100 Subject: [PATCH 063/155] Get gitlab internal ssh keys by invoking remote script * Replace ssh-keyscan to get gitlab internal's ssh keys with invoking a remote script (more secure). * Tidy up cloud init for gitlab external - mostly removing unneeded chown commands (as we recursively give ownership to the home directory and its contents at the end anyway). --- .../cloud-init-gitlab-external.template.yaml | 59 ++++++++----------- .../scripts/check_merge_requests.py | 2 +- .../setup/Setup_SRE_WebApp_Servers.ps1 | 21 +++++++ 3 files changed, 45 insertions(+), 37 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index e530dd9202..54e8662158 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -96,6 +96,11 @@ write_files: permissions: "0755" content: | + # Populate SSH known hosts with keys from gitlab internal + - path: "/home//.ssh/known_hosts" + permissions: "0600" + content: | + runcmd: # -------------------------------- @@ -141,29 +146,18 @@ runcmd: - gitlab-ctl reconfigure - gitlab-ctl restart # -------------------------------- - # Create SSH key + # CREATE SSH KEY # -------------------------------- - | - mkdir -p /home//.ssh; - ssh-keygen -t ed25519 -C 'gitlab' -N '' -f /home//.ssh/id_ed25519; - chown : "/home//.ssh/id_ed25519" - chown : "/home//.ssh/id_ed25519.pub" + mkdir -p /home//.ssh + ssh-keygen -t ed25519 -C 
'gitlab' -N '' -f /home//.ssh/id_ed25519 # -------------------------------- - # SETUP ACCESS TO GITLAB INTERNAL + # REGISTER SSH KEY WITH GITLAB INTERNAL # -------------------------------- - - echo "Configuring access to gitlab internal" - # Change ownership of secrets to - | - chown : "/home//.secrets/gitlab-internal-api-token"; - chown : "/home//.secrets/gitlab-internal-ip-address"; - chown : "/home//.secrets/gitlab-internal-username"; - chown : "/home//.secrets/gitlab-internal-user-email"; - # Create SSH key for gitlab internal access, add gitlab internal to known hosts - - | - key=$(cat /home//.ssh/id_ed25519.pub); - curl --header 'Authorization: Bearer ' --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"InternalAPIUser\"}" /api/v4/user/keys; - ssh-keyscan -H >> /home//.ssh/known_hosts; - chown : "/home//.ssh/known_hosts" + echo "Configuring access to gitlab internal" + key=$(cat /home//.ssh/id_ed25519.pub) + curl --header 'Authorization: Bearer ' --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"InternalAPIUser\"}" /api/v4/user/keys # -------------------------------- # WAIT FOR GITLAB EXTERNAL HEALTH CHECK # -------------------------------- @@ -187,37 +181,26 @@ runcmd: # SETUP ACCESS TO GITLAB EXTERNAL # -------------------------------- - echo "Configuring access to gitlab external" - # Change ownership of secrets to - - | - chown : "/home//.secrets/gitlab-external-api-token"; - chown : "/home//.secrets/gitlab-external-ip-address"; - chown : "/home//.secrets/gitlab-external-username"; - chown : "/home//.secrets/gitlab-external-user-email"; # Create SSH key for gitlab external access - | - key=$(cat /home//.ssh/id_ed25519.pub); - curl --header 'Authorization: Bearer ' --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"ExternalAPIUser\"}" /api/v4/user/keys; + key=$(cat /home//.ssh/id_ed25519.pub) + curl --header 'Authorization: Bearer ' --header 
'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"ExternalAPIUser\"}" /api/v4/user/keys # Get local ssh host keys, add them to known hosts under the gitlab external ip - | echo " $(cat /etc/ssh/ssh_host_rsa_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts echo " $(cat /etc/ssh/ssh_host_ed25519_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts echo " $(cat /etc/ssh/ssh_host_ecdsa_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts - chown : "/home//.ssh/known_hosts" # Create groups for storing unapproved and approval repos - | - curl --header "Authorization: Bearer " --data "name=approval&path=approval&visibility=internal" /api/v4/groups; + curl --header "Authorization: Bearer " --data "name=approval&path=approval&visibility=internal" /api/v4/groups curl --header "Authorization: Bearer " --data "name=unapproved&path=unapproved&visibility=internal" /api/v4/groups # -------------------------------- - # FINAL SETUP + # GIT SETUP # -------------------------------- - - echo "Finishing setup" - # Configure global git user to be gitlab internal user - | - HOME=/home/ git config --global user.name ''; - HOME=/home/ git config --global user.email '@'; - # Give ownership of their home directory - - | - chown -R : "/home/"; + echo "Configuring git" + HOME=/home/ git config --global user.name '' + HOME=/home/ git config --global user.email '@' # -------------------------------- # ADD CRONTAB ENTRIES FOR GITLAB SCRIPTS # -------------------------------- @@ -225,6 +208,10 @@ runcmd: - echo "*/10 * * * * /home//zipfile_to_gitlab_project.py" >> /etc/crontab - echo "*** Adding check_merge_requests.py to crontab ***" - echo "5,15,25,35,45,55 * * * * /home//check_merge_requests.py" >> /etc/crontab + # -------------------------------- + # GIVE OWNERSHIP OF THEIR HOME DIRECTORY + # -------------------------------- + - chown -R : "/home/" # Shutdown so that we can tell when the job has finished by polling the VM state power_state: diff --git 
a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py index 2fc52af7a7..2838bc394f 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py +++ b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py @@ -89,7 +89,7 @@ def internal_project_exists(repo_name, config): def internal_update_repo(git_url, repo_name, branch_name, config): - """Takes a git URL, `git_url`, which should be the URL to the + """Takes a git URL, `git_url`, which should be the SSH URL to the "APPROVED" repo on GITLAB-EXTERNAL, clones it and pushes all branches to the repo `repo_name` owned by 'ingress' on GITLAB-INTERNAL, creating it there first if it doesn't exist. diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index ce3937fe5f..da3183cda9 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -197,6 +197,27 @@ $gitlabUserFilter = "(&(objectClass=user)(memberOf=CN=" + $config.sre.domain.sec $gitlabExternalCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab-external.template.yaml" | Get-Item | Get-Content -Raw + +# Get public SSH keys from gitlab internal (so it can be added as a known host on gitlab external) +# ------------------------------ +$script = ' +#! 
/bin/bash +echo " $(cat /etc/ssh/ssh_host_rsa_key.pub | cut -d " " -f -2)" +echo " $(cat /etc/ssh/ssh_host_ed25519_key.pub | cut -d " " -f -2)" +echo " $(cat /etc/ssh/ssh_host_ecdsa_key.pub | cut -d " " -f -2)" +'.Replace('', $config.sre.webapps.gitlab.internal.ip) +$vmNameInternal = $config.sre.webapps.gitlab.internal.vmName +$result = Invoke-RemoteScript -VMName $vmNameInternal -ResourceGroupName $config.sre.webapps.rg -Shell "UnixShell" -Script $script +Add-LogMessage -Level Success "Fetching ssh keys from gitlab internal succeeded" +# Extract everything in between the [stdout] and [stderr] blocks of the result message. i.e. all output of the script. +$internalSshKeys = $result.Value[0].Message | Select-String "\[stdout\]\s*([\s\S]*?)\s*\[stderr\]" +$internalSshKeys = $internalSshKeys.Matches.Groups[1].Value +# Insert keys into cloud init template, maintaining indentation +$indent = " " +$indented_internalSshKeys = $internalSshKeys -split "`n" | ForEach-Object { "${indent}$_" } | Join-String -Separator "`n" +$gitlabExternalCloudInitTemplate = $gitlabExternalCloudInitTemplate.Replace("${indent}", $indented_internalSshKeys) + + # Insert scripts into the cloud-init template # ------------------------------------------- $indent = " " From a4ca58377f79b5e33764fb0d3ffcbcf9d604896c Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 1 Jun 2020 13:41:50 +0100 Subject: [PATCH 064/155] Remove commented code --- .../administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 | 1 - 1 file changed, 1 deletion(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 index 7acc9d1d32..c22b977bb4 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 @@ -20,7 +20,6 @@ Import-Module $PSScriptRoot/../common/GenerateSasToken.psm1 -Force # Get config and original context before 
changing subscription # ------------------------------------------------------------ -#$config = Get-ShmFullConfig $shmId $config = Get-SreConfig $sreId $originalContext = Get-AzContext $_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName From 16573fca5423652eeb26074b9c59d7915df656fc Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 1 Jun 2020 13:43:18 +0100 Subject: [PATCH 065/155] Remove unnecessary logging --- .../administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 | 1 - 1 file changed, 1 deletion(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 index c22b977bb4..b1400ec91c 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 @@ -62,7 +62,6 @@ $sreStorageAccount = Deploy-StorageAccount -Name $sreStorageAccountName -Resourc # Create container if not already there $containerName = $config.sre.storage.artifacts.containers.gitlabAirlockName -Add-LogMessage -Level Info "Creating blob storage container $containerName in storage account $sreStorageAccountName ..." 
$_ = Deploy-StorageContainer -Name $containerName -StorageAccount $sreStorageAccount # delete existing blobs on the container $blobs = @(Get-AzStorageBlob -Container $containerName -Context $sreStorageAccount.Context) From 7e9ab187feaef2344783d691e693a93dade75cbd Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 1 Jun 2020 14:19:48 +0100 Subject: [PATCH 066/155] Factor return out of if/else --- .../cloud_init/scripts/zipfile_to_gitlab_project.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index f68533979f..9c80e4cb09 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -387,7 +387,6 @@ def fork_project(repo_name, project_id, namespace_id, gitlab_url, gitlab_token): if response.status_code != 201: raise RuntimeError("Problem creating fork: {}".format(response.content)) new_project_id = response.json()["id"] - return new_project_id else: # project already exists - ensure it is a fork of 'approval/' new_project_id = get_project_id( @@ -406,8 +405,7 @@ def fork_project(repo_name, project_id, namespace_id, gitlab_url, gitlab_token): response.status_code, response.content ) ) - - return new_project_id + return new_project_id def unzipped_repo_to_merge_request( From 1fc2bac8544acbdde36c1b7975eb967b8a332b59 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 1 Jun 2020 14:26:23 +0100 Subject: [PATCH 067/155] Correct name for the review session host --- .../configure_shm_dc/scripts/Remove_SRE_Users_And_Groups.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/secure_research_environment/remote/configure_shm_dc/scripts/Remove_SRE_Users_And_Groups.ps1 
b/deployment/secure_research_environment/remote/configure_shm_dc/scripts/Remove_SRE_Users_And_Groups.ps1 index c63d2e7e35..2651a491b5 100644 --- a/deployment/secure_research_environment/remote/configure_shm_dc/scripts/Remove_SRE_Users_And_Groups.ps1 +++ b/deployment/secure_research_environment/remote/configure_shm_dc/scripts/Remove_SRE_Users_And_Groups.ps1 @@ -83,7 +83,7 @@ Remove-SreComputer $rdsDataserverVMName Remove-SreComputer $rdsGatewayVMName Remove-SreComputer $rdsSessionHostAppsVMName Remove-SreComputer $rdsSessionHostDesktopVMName -Remove-SreComputer $rdsSessionHostDesktopVMName +Remove-SreComputer $rdsSessionHostReviewVMName # Remove DSVMs $dsvmPrefix = "SRE-$sreId".Replace(".","-").ToUpper() From 74fd45bf9346674d1d36aeb72af612d8855287ad Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 1 Jun 2020 14:41:43 +0100 Subject: [PATCH 068/155] No template substitution in cloud-init runcmd for GitLab API tokens --- .../cloud-init-gitlab-external.template.yaml | 25 +++++++++++++------ 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index 54e8662158..9ae53a64f9 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -141,7 +141,7 @@ runcmd: echo "user = User.create(:username => '', :password => '', :password_confirmation => '', :email =>'@', :skip_confirmation => true, :name => '');user.save!;exit;" | gitlab-rails console -e production # Create a API token for the ingress user created above - | - echo "user = User.find_by(username: '');user.personal_access_tokens.create(name: 'apitoken', token_digest: Gitlab::CryptoHelper.sha256(''), impersonation: false, scopes: [:api]);exit;" | gitlab-rails console -e production + 
echo "user = User.find_by(username: '');user.personal_access_tokens.create(name: 'apitoken', token_digest: Gitlab::CryptoHelper.sha256('$(cat '/home//.secrets/gitlab-external-api-token')'), impersonation: false, scopes: [:api]);exit;" | gitlab-rails console -e production # Reload GitLab configuration and restart GitLab - gitlab-ctl reconfigure - gitlab-ctl restart @@ -155,9 +155,17 @@ runcmd: # REGISTER SSH KEY WITH GITLAB INTERNAL # -------------------------------- - | - echo "Configuring access to gitlab internal" - key=$(cat /home//.ssh/id_ed25519.pub) - curl --header 'Authorization: Bearer ' --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"InternalAPIUser\"}" /api/v4/user/keys + chown : "/home//.secrets/gitlab-internal-api-token"; + chown : "/home//.secrets/gitlab-internal-ip-address"; + chown : "/home//.secrets/gitlab-internal-username"; + chown : "/home//.secrets/gitlab-internal-user-email"; + # Create SSH key for gitlab internal access, add gitlab internal to known hosts + - | + key=$(cat /home//.ssh/id_ed25519.pub); + curl --header "Authorization: Bearer $(cat '/home//.secrets/gitlab-internal-api-token')" --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"InternalAPIUser\"}" /api/v4/user/keys; + ssh-keyscan -H >> /home//.ssh/known_hosts; + chown : "/home//.ssh/known_hosts" + # -------------------------------- # WAIT FOR GITLAB EXTERNAL HEALTH CHECK # -------------------------------- @@ -183,8 +191,8 @@ runcmd: - echo "Configuring access to gitlab external" # Create SSH key for gitlab external access - | - key=$(cat /home//.ssh/id_ed25519.pub) - curl --header 'Authorization: Bearer ' --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"ExternalAPIUser\"}" /api/v4/user/keys + key=$(cat /home//.ssh/id_ed25519.pub); + curl --header "Authorization: Bearer $(cat '/home//.secrets/gitlab-external-api-token')" --header 'Content-Type:application/json' --data "{\"key\": 
\"$key\", \"title\": \"ExternalAPIUser\"}" /api/v4/user/keys; # Get local ssh host keys, add them to known hosts under the gitlab external ip - | echo " $(cat /etc/ssh/ssh_host_rsa_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts @@ -192,8 +200,9 @@ runcmd: echo " $(cat /etc/ssh/ssh_host_ecdsa_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts # Create groups for storing unapproved and approval repos - | - curl --header "Authorization: Bearer " --data "name=approval&path=approval&visibility=internal" /api/v4/groups - curl --header "Authorization: Bearer " --data "name=unapproved&path=unapproved&visibility=internal" /api/v4/groups + curl --header "Authorization: Bearer $(cat '/home//.secrets/gitlab-external-api-token')" --data "name=approval&path=approval&visibility=internal" /api/v4/groups; + curl --header "Authorization: Bearer $(cat '/home//.secrets/gitlab-external-api-token')" --data "name=unapproved&path=unapproved&visibility=internal" /api/v4/groups + # -------------------------------- # GIT SETUP # -------------------------------- From aa05a618bdc1c923d92775dda8fc879e37843f6f Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 1 Jun 2020 15:31:26 +0100 Subject: [PATCH 069/155] Factor out deploying empty blob storage container --- .../SRE_Upload_Git_Repo_to_GitlabExternal.ps1 | 18 +---------- deployment/common/Deployments.psm1 | 30 +++++++++++++++++++ .../setup/Setup_SRE_VNET_RDS.ps1 | 17 +---------- 3 files changed, 32 insertions(+), 33 deletions(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 index b1400ec91c..bd3d0d0513 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 @@ -62,23 +62,7 @@ $sreStorageAccount = Deploy-StorageAccount -Name $sreStorageAccountName -Resourc # Create container if not already there $containerName = 
$config.sre.storage.artifacts.containers.gitlabAirlockName -$_ = Deploy-StorageContainer -Name $containerName -StorageAccount $sreStorageAccount -# delete existing blobs on the container -$blobs = @(Get-AzStorageBlob -Container $containerName -Context $sreStorageAccount.Context) -$numBlobs = $blobs.Length -if ($numBlobs -gt 0) { - Add-LogMessage -Level Info "[ ] deleting $numBlobs blobs aready in container '$containerName'..." - $blobs | ForEach-Object { Remove-AzStorageBlob -Blob $_.Name -Container $containerName -Context $sreStorageAccount.Context -Force } - while ($numBlobs -gt 0) { - Start-Sleep -Seconds 5 - $numBlobs = (Get-AzStorageBlob -Container $containerName -Context $sreStorageAccount.Context).Length - } - if ($?) { - Add-LogMessage -Level Success "Blob deletion succeeded" - } else { - Add-LogMessage -Level Fatal "Blob deletion failed!" - } -} +$_ = Deploy-EmptyStorageContainer -Name $containerName -StorageAccount $sreStorageAccount # copy zipfile to blob storage # ---------------------------- diff --git a/deployment/common/Deployments.psm1 b/deployment/common/Deployments.psm1 index 4c01e50650..7ebfc153c1 100644 --- a/deployment/common/Deployments.psm1 +++ b/deployment/common/Deployments.psm1 @@ -319,6 +319,36 @@ function Deploy-StorageContainer { Export-ModuleMember -Function Deploy-StorageContainer +# Create storage container and ensure it is empty +# ----------------------------------------------- +function Deploy-EmptyStorageContainer { + param( + [Parameter(Mandatory = $true, HelpMessage = "Name of storage container to deploy")] + $Name, + [Parameter(Mandatory = $true, HelpMessage = "Name of storage account to deploy into")] + $StorageAccount + ) + $_ = Deploy-StorageContainer -Name $Name -StorageAccount $StorageAccount + # delete existing blobs on the container + $blobs = @(Get-AzStorageBlob -Container $Name -Context $StorageAccount.Context) + $numBlobs = $blobs.Length + if ($numBlobs -gt 0) { + Add-LogMessage -Level Info "[ ] deleting $numBlobs 
blobs aready in container '$Name'..." + $blobs | ForEach-Object { Remove-AzStorageBlob -Blob $_.Name -Container $Name -Context $StorageAccount.Context -Force } + while ($numBlobs -gt 0) { + Start-Sleep -Seconds 5 + $numBlobs = (Get-AzStorageBlob -Container $Name -Context $StorageAccount.Context).Length + } + if ($?) { + Add-LogMessage -Level Success "Blob deletion succeeded" + } else { + Add-LogMessage -Level Fatal "Blob deletion failed!" + } + } +} +Export-ModuleMember -Function Deploy-EmptyStorageContainer + + # Create Linux virtual machine if it does not exist # ------------------------------------------------- function Deploy-UbuntuVirtualMachine { diff --git a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 index 7236e8648b..3ce9f165ed 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 @@ -212,22 +212,7 @@ Deploy-ArmTemplate -TemplatePath (Join-Path $PSScriptRoot ".." "arm_templates" " # --------------------------------------------- Add-LogMessage -Level Info "Creating blob storage containers in storage account '$($sreStorageAccount.StorageAccountName)'..." foreach ($containerName in ($containerNameGateway, $containerNameSessionHosts)) { - $_ = Deploy-StorageContainer -Name $containerName -StorageAccount $sreStorageAccount - $blobs = @(Get-AzStorageBlob -Container $containerName -Context $sreStorageAccount.Context) - $numBlobs = $blobs.Length - if ($numBlobs -gt 0) { - Add-LogMessage -Level Info "[ ] deleting $numBlobs blobs aready in container '$containerName'..." - $blobs | ForEach-Object { Remove-AzStorageBlob -Blob $_.Name -Container $containerName -Context $sreStorageAccount.Context -Force } - while ($numBlobs -gt 0) { - Start-Sleep -Seconds 5 - $numBlobs = (Get-AzStorageBlob -Container $containerName -Context $sreStorageAccount.Context).Length - } - if ($?) 
{ - Add-LogMessage -Level Success "Blob deletion succeeded" - } else { - Add-LogMessage -Level Fatal "Blob deletion failed!" - } - } + Deploy-EmptyStorageContainer -Name $containerName -StorageAccount $sreStorageAccount } From 3efe4bcca065b515d26494b8acba8fe3b85b5cf5 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 1 Jun 2020 19:17:24 +0100 Subject: [PATCH 070/155] Log file to match script name --- .../cloud_init/scripts/zipfile_to_gitlab_project.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 9c80e4cb09..60119e921b 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -14,7 +14,7 @@ logger.setLevel(logging.INFO) formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s") f_handler = RotatingFileHandler( - "upload_zipfiles_to_projects.log", maxBytes=5 * 1024 * 1024, backupCount=10 + "zipfile_to_gitlab_project.log", maxBytes=5 * 1024 * 1024, backupCount=10 ) f_handler.setFormatter(formatter) c_handler = logging.StreamHandler() From 1c927b82404acd12a8ee482beaa49de3d2ddef89 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Tue, 2 Jun 2020 08:40:02 +0100 Subject: [PATCH 071/155] start refactoring gitlab secrets files --- .../cloud-init-gitlab-external.template.yaml | 46 +++++++------------ 1 file changed, 16 insertions(+), 30 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index 54e8662158..49ed0043e0 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ 
b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -54,38 +54,23 @@ write_files: EOS git_data_dirs({ "default" => { "path" => "/datadrive/gitdata" } }) # Secrets for Gitlab Internal and External Access - - path: "/home//.secrets/gitlab-internal-api-token" + - path: "/home//.secrets/gitlab-config.json" permissions: "0600" content: | - - - path: "/home//.secrets/gitlab-internal-ip-address" - permissions: "0600" - content: | - - - path: "/home//.secrets/gitlab-internal-username" - permissions: "0600" - content: | - - - path: "/home//.secrets/gitlab-internal-user-email" - permissions: "0600" - content: | - @ - - path: "/home//.secrets/gitlab-external-api-token" - permissions: "0600" - content: | - - - path: "/home//.secrets/gitlab-external-ip-address" - permissions: "0600" - content: | - - - path: "/home//.secrets/gitlab-external-username" - permissions: "0600" - content: | - - - path: "/home//.secrets/gitlab-external-user-email" - permissions: "0600" - content: | - @ + { + "GITLAB_INTERNAL": { + "ip_address": "", + "username": "", + "user_email": "@", + "api_token": "" + }, + "GITLAB_EXTERNAL": { + "ip_address": "", + "username": "", + "user_email": "@", + "api_token": "" + } + } # Script for creating projects and merge requests on gitlab-external - path: "/home//zipfile_to_gitlab_project.py" permissions: "0755" @@ -164,6 +149,7 @@ runcmd: - | attempt_counter=0 max_attempts=60 + echo echo "Waiting for GitLab OK health check" until [ "$(curl -s localhost/-/health)" = "GitLab OK" ] do From c8619c7474fbc02aba97791b93fe8f7fd33b2f74 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Tue, 2 Jun 2020 11:32:07 +0100 Subject: [PATCH 072/155] Refactor gitlab credentials files and functions --- .../cloud-init-gitlab-external.template.yaml | 37 ++++----- .../cloud-init-gitlab-internal.template.yaml | 1 - .../scripts/check_merge_requests.py | 60 +++----------- .../cloud_init/scripts/gitlab_config.py | 80 +++++++++++++++++++ 
.../scripts/zipfile_to_gitlab_project.py | 19 +---- .../setup/Setup_SRE_WebApp_Servers.ps1 | 4 +- 6 files changed, 113 insertions(+), 88 deletions(-) create mode 100755 deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index 677853758a..072c02f00a 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -17,7 +17,6 @@ packages: apt: # Preserves the existing /etc/apt/sources.list preserve_sources_list: true - # Add repositories sources: gitlab.list: @@ -53,24 +52,28 @@ write_files: last_name: 'sn' EOS git_data_dirs({ "default" => { "path" => "/datadrive/gitdata" } }) - # Secrets for Gitlab Internal and External Access + # Secrets for Gitlab Internal and External Access, and script to get them - path: "/home//.secrets/gitlab-config.json" permissions: "0600" content: | { - "GITLAB_INTERNAL": { + "GITLAB-INTERNAL": { "ip_address": "", "username": "", "user_email": "@", "api_token": "" }, - "GITLAB_EXTERNAL": { + "GITLAB-EXTERNAL": { "ip_address": "", "username": "", "user_email": "@", "api_token": "" } } + - path: "/home//gitlab_config.py" + permissions: "0755" + content: | + # Script for creating projects and merge requests on gitlab-external - path: "/home//zipfile_to_gitlab_project.py" permissions: "0755" @@ -137,20 +140,13 @@ runcmd: mkdir -p /home//.ssh ssh-keygen -t ed25519 -C 'gitlab' -N '' -f /home//.ssh/id_ed25519 # -------------------------------- - # REGISTER SSH KEY WITH GITLAB INTERNAL + # GITLAB INTERNAL SSH SETUP # -------------------------------- - | - chown : "/home//.secrets/gitlab-internal-api-token"; - chown : "/home//.secrets/gitlab-internal-ip-address"; - chown : 
"/home//.secrets/gitlab-internal-username"; - chown : "/home//.secrets/gitlab-internal-user-email"; - # Create SSH key for gitlab internal access, add gitlab internal to known hosts - - | - key=$(cat /home//.ssh/id_ed25519.pub); - curl --header "Authorization: Bearer $(cat '/home//.secrets/gitlab-internal-api-token')" --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"InternalAPIUser\"}" /api/v4/user/keys; - ssh-keyscan -H >> /home//.ssh/known_hosts; - chown : "/home//.ssh/known_hosts" - + key=$(cat /home//.ssh/id_ed25519.pub) + token=$(/home//gitlab_config.py --server GITLAB-INTERNAL --value api_token) + curl --header "Authorization: Bearer $token" --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"InternalAPIUser\"}" /api/v4/user/keys + ssh-keyscan -H >> /home//.ssh/known_hosts # -------------------------------- # WAIT FOR GITLAB EXTERNAL HEALTH CHECK # -------------------------------- @@ -178,7 +174,8 @@ runcmd: # Create SSH key for gitlab external access - | key=$(cat /home//.ssh/id_ed25519.pub); - curl --header "Authorization: Bearer $(cat '/home//.secrets/gitlab-external-api-token')" --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"ExternalAPIUser\"}" /api/v4/user/keys; + token=$(/home//gitlab_config.py --server GITLAB-EXTERNAL --value api_token) + curl --header "Authorization: Bearer $token" --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"ExternalAPIUser\"}" /api/v4/user/keys # Get local ssh host keys, add them to known hosts under the gitlab external ip - | echo " $(cat /etc/ssh/ssh_host_rsa_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts @@ -186,9 +183,9 @@ runcmd: echo " $(cat /etc/ssh/ssh_host_ecdsa_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts # Create groups for storing unapproved and approval repos - | - curl --header "Authorization: Bearer $(cat '/home//.secrets/gitlab-external-api-token')" --data 
"name=approval&path=approval&visibility=internal" /api/v4/groups; - curl --header "Authorization: Bearer $(cat '/home//.secrets/gitlab-external-api-token')" --data "name=unapproved&path=unapproved&visibility=internal" /api/v4/groups - + token=$(/home//gitlab_config.py --server GITLAB-EXTERNAL --value api_token) + curl --header "Authorization: Bearer $token" --data "name=approval&path=approval&visibility=internal" /api/v4/groups + curl --header "Authorization: Bearer $token" --data "name=unapproved&path=unapproved&visibility=internal" /api/v4/groups # -------------------------------- # GIT SETUP # -------------------------------- diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml index 9aec531eaa..d1dc52fa9b 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml @@ -16,7 +16,6 @@ packages: apt: # Preserves the existing /etc/apt/sources.list preserve_sources_list: true - # Add repositories sources: gitlab.list: diff --git a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py index 2838bc394f..c4a259d843 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py +++ b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py @@ -26,6 +26,7 @@ from pathlib import Path import logging from logging.handlers import RotatingFileHandler +from gitlab_config import get_api_config # Setup logging to console and file. File uses RotatingFileHandler to create # logs over a rolling window, 10 files each max 5 MB in size. 
@@ -72,7 +73,7 @@ def internal_project_exists(repo_name, config): # Does repo_name exist on GITLAB-INTERNAL? response = requests.get( - config["api_url"] + "projects/" + repo_path_encoded, headers=config["headers"] + config["api_url"] + "/projects/" + repo_path_encoded, headers=config["headers"] ) if response.status_code == 404: @@ -117,7 +118,7 @@ def internal_update_repo(git_url, repo_name, branch_name, config): if not project_exists: print("Creating: " + repo_name) response = requests.post( - config["api_url"] + "projects", + config["api_url"] + "/projects", headers=config["headers"], data={"name": repo_name, "path": repo_name, "visibility": "public"}, ) @@ -203,45 +204,6 @@ def put_request(endpoint, headers, params=None): ) -def get_gitlab_config(server="external"): - """Get gitlab server details and user account secrets - - Parameters - ---------- - server : str, optional - Which server to get secrets for either "internal" or, by default "external" - - Returns - ------- - dict - Secrets api_url, api_token, ip and headers. - - Raises - ------ - ValueError - If server is not a supported value - """ - home = str(Path.home()) - - if server == "external": - with open(f"{home}/.secrets/gitlab-external-ip-address", "r") as f: - ip = f.readlines()[0].strip() - with open(f"{home}/.secrets/gitlab-external-api-token", "r") as f: - token = f.readlines()[0].strip() - elif server == "internal": - with open(f"{home}/.secrets/gitlab-internal-ip-address", "r") as f: - ip = f.readlines()[0].strip() - with open(f"{home}/.secrets/gitlab-internal-api-token", "r") as f: - token = f.readlines()[0].strip() - else: - raise ValueError("Server must be external or internal") - - api_url = f"http://{ip}/api/v4/" - headers = {"Authorization": "Bearer " + token} - - return {"api_url": api_url, "api_token": token, "ip": ip, "headers": headers} - - def get_group_id(group_name, config): """Get the ID of a group on a gitlab server. 
@@ -262,7 +224,7 @@ def get_group_id(group_name, config): ValueError If group_name not found in the groups returned from the gitlab server. """ - endpoint = config["api_url"] + "groups" + endpoint = config["api_url"] + "/groups" response = get_request(endpoint, headers=config["headers"]) for group in response: if group["name"] == group_name: @@ -285,7 +247,7 @@ def get_project(project_id, config): dict Project JSON as returned by the gitlab API. """ - endpoint = config["api_url"] + f"projects/{project_id}" + endpoint = config["api_url"] + f"/projects/{project_id}" project = get_request(endpoint, headers=config["headers"]) return project @@ -336,7 +298,7 @@ def count_unresolved_mr_discussions(mr, config): project_id = mr["project_id"] mr_iid = mr["iid"] endpoint = ( - config["api_url"] + f"projects/{project_id}/merge_requests/{mr_iid}/discussions" + config["api_url"] + f"/projects/{project_id}/merge_requests/{mr_iid}/discussions" ) discussions = get_request(endpoint, headers=config["headers"]) if len(discussions) == 0: @@ -370,7 +332,7 @@ def accept_merge_request(mr, config): project_id = mr["project_id"] mr_iid = mr["iid"] endpoint = ( - config["api_url"] + f"projects/{project_id}/merge_requests/{mr_iid}/merge" + config["api_url"] + f"/projects/{project_id}/merge_requests/{mr_iid}/merge" ) return put_request(endpoint, headers=config["headers"]) @@ -383,17 +345,17 @@ def check_merge_requests(): logger.info(f"STARTING RUN") try: - config_external = get_gitlab_config(server="external") - config_internal = get_gitlab_config(server="internal") + config_external = get_api_config(server="GITLAB-EXTERNAL") + config_internal = get_api_config(server="GITLAB-INTERNAL") except Exception as e: logger.critical(f"Failed to load gitlab secrets: {e}") return try: internal_status = requests.get( - config_internal["api_url"] + "projects", + config_internal["api_url"] + "/projects", headers=config_internal["headers"], - timeout=5, + timeout=10, ) if not internal_status.ok: 
logger.critical( diff --git a/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py new file mode 100755 index 0000000000..540865bb2b --- /dev/null +++ b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python3 + +import json +import argparse +from pathlib import Path + + +def get_gitlab_config(file=None, server=None, value=None): +    """Get GitLab server details and user secrets. + +    Parameters +    ---------- +    file : str, optional +        Path to configuration file, by default None which resolves to +        .secrets/gitlab-config.json in the user's home directory. +    server : str, optional +        Name of the server to get details for (must match format in config file), +        by default None which returns all servers. +    value : str, optional +        Name of the configuration value to return, by default None which returns +        all parameters. + +    Returns +    ------- +    dict or str +        If server and value are not None, str of the requested value. If only +        server or neither specified, dict of all the relevant values. +    """ +    if file is None: +        file = f"{Path.home()}/.secrets/gitlab-config.json" + +    with open(file, "r") as f: +        config = json.load(f) + +    if server is None and value is None: +        return config +    elif value is None: +        return config[server] +    elif server is None: +        raise ValueError("If value is given, server must also be given.") +    else: +        return config[server][value] + + +def get_api_config(server, file=None): +    """Construct API URL, headers and other settings. + +    Parameters +    ---------- +    server : str +        Which server to get secrets for (name present in config file). +    file : str +        Path to configuration file, by default None which resolves to +        .secrets/gitlab-config.json in the user's home directory. + +    Returns +    ------- +    dict +        Secrets api_url, api_token, ip and headers. 
+ """ + config = get_gitlab_config(file=file, server=server, value=None) + + ip = config["ip_address"] + token = config["api_token"] + api_url = f"http://{ip}/api/v4" + headers = {"Authorization": "Bearer " + token} + + return {"api_url": api_url, "api_token": token, "ip": ip, "headers": headers} + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Get GitLab configuration values.") + parser.add_argument("--file", help="Location of config file.", default=None) + parser.add_argument( + "--server", help="Name of server to get config for.", default=None + ) + parser.add_argument("--value", help="Configuration value to get.", default=None) + args = parser.parse_args() + + print(get_gitlab_config(file=args.file, server=args.server, value=args.value)) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 60119e921b..6c841fba30 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -9,6 +9,7 @@ from pathlib import Path import logging from logging.handlers import RotatingFileHandler +from gitlab_config import get_api_config logger = logging.getLogger("project_upload_logger") logger.setLevel(logging.INFO) @@ -59,20 +60,6 @@ def unzip_zipfiles(zipfile_dir, tmp_unzipped_dir): return output_list -def get_gitlab_config(): - home = str(Path.home()) - - with open(f"{home}/.secrets/gitlab-external-ip-address", "r") as f: - ip = f.readlines()[0].strip() - - with open(f"{home}/.secrets/gitlab-external-api-token", "r") as f: - token = f.readlines()[0].strip() - api_url = f"http://{ip}/api/v4/" - headers = {"Authorization": "Bearer " + token} - - return {"api_url": api_url, "api_token": token, "ip": ip, "headers": headers} - - def get_group_namespace_ids( gitlab_url, gitlab_token, 
groups=["approval", "unapproved"] ): @@ -147,7 +134,7 @@ def get_project_id(repo_name, namespace_id, gitlab_url, gitlab_token): def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): - projects_url = "{}projects/".format(gitlab_url) + projects_url = "{}/projects/".format(gitlab_url) response = requests.post( projects_url, headers={"Authorization": "Bearer " + gitlab_token}, @@ -560,7 +547,7 @@ def main(): os.makedirs(TMP_REPO_DIR) # get the gitlab config - config = get_gitlab_config() + config = get_api_config("GITLAB-EXTERNAL") # unzip the zipfiles, and retrieve a list of tuples describing # (repo_name, commit_hash, desired_branch, unzipped_location) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index da3183cda9..f95d049f02 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -222,8 +222,8 @@ $gitlabExternalCloudInitTemplate = $gitlabExternalCloudInitTemplate.Replace("${i # ------------------------------------------- $indent = " " foreach ($scriptName in @("zipfile_to_gitlab_project.py", - "check_merge_requests.py")) { - + "check_merge_requests.py", + "gitlab_config.py")) { $raw_script = Get-Content (Join-Path $PSScriptRoot ".." 
"cloud_init" "scripts" $scriptName) -Raw $indented_script = $raw_script -split "`n" | ForEach-Object { "${indent}$_" } | Join-String -Separator "`n" $gitlabExternalCloudInitTemplate = $gitlabExternalCloudInitTemplate.Replace("${indent}<$scriptName>", $indented_script) From 1b703e53f0e57cd308beec03899a05125ee5e17f Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Tue, 2 Jun 2020 11:53:45 +0100 Subject: [PATCH 073/155] black python scripts --- .../scripts/check_merge_requests.py | 3 +- .../cloud_init/scripts/gitlab_config.py | 2 +- .../scripts/zipfile_to_gitlab_project.py | 30 +++++++++++-------- 3 files changed, 21 insertions(+), 14 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py index c4a259d843..8936490a43 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py +++ b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py @@ -298,7 +298,8 @@ def count_unresolved_mr_discussions(mr, config): project_id = mr["project_id"] mr_iid = mr["iid"] endpoint = ( - config["api_url"] + f"/projects/{project_id}/merge_requests/{mr_iid}/discussions" + config["api_url"] + + f"/projects/{project_id}/merge_requests/{mr_iid}/discussions" ) discussions = get_request(endpoint, headers=config["headers"]) if len(discussions) == 0: diff --git a/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py index 540865bb2b..21f175c585 100755 --- a/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py +++ b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py @@ -59,7 +59,7 @@ def get_api_config(server, file=None): Secrets api_url, api_token, ip and headers. 
""" config = get_gitlab_config(file=file, server=server, value=None) - + ip = config["ip_address"] token = config["api_token"] api_url = f"http://{ip}/api/v4" diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 6c841fba30..e31e81e881 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -143,7 +143,7 @@ def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): "path": repo_name, "visibility": "internal", "namespace_id": namespace_id, - "default_branch": "_gitlab_ingress_review" + "default_branch": "_gitlab_ingress_review", }, ) assert response.json()["name"] == repo_name @@ -198,7 +198,9 @@ def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): commit history. """ # Make the first commit to the project with the README - project_commit_url = f"{gitlab_url}/projects/{project_info['id']}/repository/commits" + project_commit_url = ( + f"{gitlab_url}/projects/{project_info['id']}/repository/commits" + ) response = requests.post( project_commit_url, headers={"Authorization": "Bearer " + gitlab_token}, @@ -206,13 +208,9 @@ def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): "branch": "_gitlab_ingress_review", "commit_message": "Initial commit", "actions": [ - { - "action": "create", - "file_path": "README.md", - "content": README - } - ] - } + {"action": "create", "file_path": "README.md", "content": README} + ], + }, ) return project_info @@ -236,8 +234,11 @@ def check_if_branch_exists(branch_name, project_id, gitlab_url, gitlab_token): def create_branch( - branch_name, project_id, gitlab_url, gitlab_token, - reference_branch="_gitlab_ingress_review" + branch_name, + project_id, + gitlab_url, + gitlab_token, + 
reference_branch="_gitlab_ingress_review", ): # assume branch doesn't already exist - create it! branch_url = "{}/projects/{}/repository/branches".format(gitlab_url, project_id) @@ -460,7 +461,12 @@ def unzipped_repo_to_merge_request( ) if not branch_exists: clone_commit_and_push( - repo_name, unzipped_location, tmp_repo_dir, src_branch_name, remote_url, commit_hash + repo_name, + unzipped_location, + tmp_repo_dir, + src_branch_name, + remote_url, + commit_hash, ) logger.info( "Pushed to {}/{} branch {}".format( From 72759235ff02ec654b8aeb0a06308e30d991acdc Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Tue, 2 Jun 2020 13:48:29 +0100 Subject: [PATCH 074/155] add missing file path argument --- .../cloud_init/cloud-init-gitlab-external.template.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index 072c02f00a..2fa2d86fc7 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -144,7 +144,7 @@ runcmd: # -------------------------------- - | key=$(cat /home//.ssh/id_ed25519.pub) - token=$(/home//gitlab_config.py --server GITLAB-INTERNAL --value api_token) + token=$(/home//gitlab_config.py --server GITLAB-INTERNAL --value api_token --file /home//.secrets/gitlab-config.json) curl --header "Authorization: Bearer $token" --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"InternalAPIUser\"}" /api/v4/user/keys ssh-keyscan -H >> /home//.ssh/known_hosts # -------------------------------- @@ -174,7 +174,7 @@ runcmd: # Create SSH key for gitlab external access - | key=$(cat /home//.ssh/id_ed25519.pub); - token=$(/home//gitlab_config.py --server GITLAB-EXTERNAL --value api_token) + 
token=$(/home//gitlab_config.py --server GITLAB-EXTERNAL --value api_token --file /home//.secrets/gitlab-config.json) curl --header "Authorization: Bearer $token" --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"ExternalAPIUser\"}" /api/v4/user/keys # Get local ssh host keys, add them to known hosts under the gitlab external ip - | @@ -183,7 +183,7 @@ runcmd: echo " $(cat /etc/ssh/ssh_host_ecdsa_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts # Create groups for storing unapproved and approval repos - | - token=$(/home//gitlab_config.py --server GITLAB-EXTERNAL --value api_token) + token=$(/home//gitlab_config.py --server GITLAB-EXTERNAL --value api_token --file /home//.secrets/gitlab-config.json) curl --header "Authorization: Bearer $token" --data "name=approval&path=approval&visibility=internal" /api/v4/groups curl --header "Authorization: Bearer $token" --data "name=unapproved&path=unapproved&visibility=internal" /api/v4/groups # -------------------------------- From 77884e88b6f4c9e47ac1e1d2860e0d1a5ab1f4b9 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Tue, 2 Jun 2020 14:39:50 +0100 Subject: [PATCH 075/155] fix not updated secrets file location --- .../cloud_init/cloud-init-gitlab-external.template.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml index 2fa2d86fc7..4be96378f1 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml @@ -129,7 +129,8 @@ runcmd: echo "user = User.create(:username => '', :password => '', :password_confirmation => '', :email =>'@', :skip_confirmation => true, :name => '');user.save!;exit;" | gitlab-rails console -e production # Create a API 
token for the ingress user created above - | - echo "user = User.find_by(username: '');user.personal_access_tokens.create(name: 'apitoken', token_digest: Gitlab::CryptoHelper.sha256('$(cat '/home//.secrets/gitlab-external-api-token')'), impersonation: false, scopes: [:api]);exit;" | gitlab-rails console -e production + token=$(/home//gitlab_config.py --server GITLAB-EXTERNAL --value api_token --file /home//.secrets/gitlab-config.json) + echo "user = User.find_by(username: '');user.personal_access_tokens.create(name: 'apitoken', token_digest: Gitlab::CryptoHelper.sha256('$token'), impersonation: false, scopes: [:api]);exit;" | gitlab-rails console -e production # Reload GitLab configuration and restart GitLab - gitlab-ctl reconfigure - gitlab-ctl restart From f6ec3facae94b1f8caf39a9f8c62501c54983382 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Wed, 3 Jun 2020 22:07:33 +0100 Subject: [PATCH 076/155] Changes to creating merge request from unzipped repo - refer to "unzipped repo" as "snapshot" (there is no .git) - use branch named "commit-COMMIT_SHA" as the source of the MR - this now has its base commit as the latest commit of the target branch in 'approval' - Refactor functions that check and create resources (add idempotent variants) --- .../scripts/zipfile_to_gitlab_project.py | 247 ++++++++++-------- 1 file changed, 136 insertions(+), 111 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index e31e81e881..5757ee9d5a 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -104,7 +104,7 @@ def check_if_project_exists(repo_name, namespace_id, gitlab_url, gitlab_token): return False -def get_project_info(repo_name, namespace_id, gitlab_url, gitlab_token): +def 
get_or_create_project(repo_name, namespace_id, gitlab_url, gitlab_token): already_exists = check_if_project_exists( repo_name, namespace_id, gitlab_url, gitlab_token ) @@ -121,18 +121,6 @@ def get_project_info(repo_name, namespace_id, gitlab_url, gitlab_token): return project_info -def get_project_remote_url(repo_name, namespace_id, gitlab_url, gitlab_token): - project_info = get_project_info(repo_name, namespace_id, gitlab_url, gitlab_token) - - return project_info["ssh_url_to_repo"] - - -def get_project_id(repo_name, namespace_id, gitlab_url, gitlab_token): - project_info = get_project_info(repo_name, namespace_id, gitlab_url, gitlab_token) - - return project_info["id"] - - def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): projects_url = "{}/projects/".format(gitlab_url) response = requests.post( @@ -256,6 +244,43 @@ def create_branch( return branch_info +def create_branch_if_not_exists( + branch_name, + project_id, + gitlab_url, + gitlab_token, + log_project_info, + reference_branch="_gitlab_ingress_review", +): + branch_exists = check_if_branch_exists( + branch_name, + project_id, + gitlab_url, + gitlab_token, + ) + if not branch_exists: + branch_info = create_branch( + branch_name, + project_id, + gitlab_url, + gitlab_token, + reference_branch, + ) + assert branch_info["name"] == branch_name + logger.info( + "{} branch {} created".format( + log_project_info, branch_name + ) + ) + else: + logger.info( + "{} branch {} already exists".format( + log_project_info, branch_name + ) + ) + + + def check_if_merge_request_exists( source_branch, target_project_id, target_branch, gitlab_url, gitlab_token ): @@ -277,7 +302,7 @@ def check_if_merge_request_exists( source_branch, target_branch ) ) - return True + return mr return False @@ -317,7 +342,7 @@ def create_merge_request( "title": title, }, ) - if response.status_code != 201: + if (response.status_code != 201): # raise RuntimeError("Problem creating Merge Request {} {} {}: {}"\ # 
.format(repo_name, source_branch,target_branch, # response.content)) @@ -333,13 +358,73 @@ def create_merge_request( return mr_info +def create_merge_request_if_not_exists( + repo_name, + source_project_id, + source_branch, + target_project_id, + target_branch, + gitlab_url, + gitlab_token, +): + mr_exists = check_if_merge_request_exists( + source_branch, + target_project_id, + target_branch, + gitlab_url, + gitlab_token + ) + + if mr_exists: + logger.info( + "Merge Request for {} {} to {} already exists".format( + repo_name, source_branch, target_branch + ) + ) + mr_info = mr_exists + else: + mr_info = create_merge_request( + repo_name, + source_project_id, + source_branch, + target_project_id, + target_branch, + gitlab_url, + gitlab_token + ) + logger.info( + "Created merge request {} -> {}".format(source_branch, target_branch) + ) + return mr_info + + def clone_commit_and_push( - repo_name, path_to_unzipped_repo, tmp_repo_dir, branch_name, remote_url, commit_hash + repo_name, path_to_unzipped_repo, tmp_repo_dir, branch_name, + target_branch_name, remote_url, target_project_url, commit_hash ): # Clone the repo subprocess.run(["git", "clone", remote_url], cwd=tmp_repo_dir, check=True) working_dir = os.path.join(tmp_repo_dir, repo_name) assert os.path.exists(working_dir) + + # Add upstream (target repo) to this repo + subprocess.run(["git", "remote", "add", "approval", target_project_url], cwd=working_dir, check=True) + subprocess.run(["git", "fetch", "approval"], cwd=working_dir, check=True) + + # Checkout the branch with the requested name (creating it at the + # current commit of the default branch if it doesn't exist) + git_checkout_result = subprocess.run(["git", "checkout", target_branch_name], cwd=working_dir) + if git_checkout_result.returncode == 0: + subprocess.run(["git", "pull", "approval"], cwd=working_dir, check=True) + + # now checkout the branch holding the snapshot + subprocess.run(["git", "checkout", "-b", branch_name], cwd=working_dir, 
check=True) + + # Remove the contents of the cloned repo (everything except .git) + for item in os.listdir(working_dir): + if item != ".git": + subprocess.run(["rm", "-rf", item], cwd=working_dir, check=True) + # Copy the unzipped repo contents into our cloned (empty) repo for item in os.listdir(path_to_unzipped_repo): subprocess.run( @@ -347,19 +432,20 @@ def clone_commit_and_push( cwd=working_dir, check=True, ) - # Create the branch with the requested name - subprocess.run(["git", "checkout", "-b", branch_name], cwd=working_dir, check=True) + # Commit everything to this branch, also putting commit hash into message subprocess.run(["git", "add", "."], cwd=working_dir, check=True) commit_msg = "Import snapshot of {} at commit {}".format(remote_url, commit_hash) subprocess.run(["git", "commit", "-m", commit_msg], cwd=working_dir, check=True) - # Push back to gitlab external + # Push back to gitlab external (unapproved) subprocess.run( - ["git", "push", "--set-upstream", "origin", branch_name], + ["git", "push", "-f", "--set-upstream", "origin", branch_name], cwd=working_dir, check=True, ) + logger.info("Pushed to {} branch {}".format(remote_url, branch_name)) + def fork_project(repo_name, project_id, namespace_id, gitlab_url, gitlab_token): already_exists = check_if_project_exists( @@ -374,12 +460,13 @@ def fork_project(repo_name, project_id, namespace_id, gitlab_url, gitlab_token): ) if response.status_code != 201: raise RuntimeError("Problem creating fork: {}".format(response.content)) - new_project_id = response.json()["id"] + new_project_info = response.json()#["id"] else: # project already exists - ensure it is a fork of 'approval/' - new_project_id = get_project_id( + new_project_info = get_or_create_project( repo_name, namespace_id, gitlab_url, gitlab_token ) + new_project_id = new_project_info["id"] fork_url = "{}/projects/{}/fork/{}".format( gitlab_url, new_project_id, project_id ) @@ -393,122 +480,65 @@ def fork_project(repo_name, project_id, namespace_id, 
gitlab_url, gitlab_token): response.status_code, response.content ) ) - return new_project_id + return new_project_info -def unzipped_repo_to_merge_request( - repo_details, tmp_repo_dir, gitlab_config, namespace_ids, group_names +def unzipped_snapshot_to_merge_request( + shapshot_details, tmp_repo_dir, gitlab_config, namespace_ids, group_names ): # unpack tuple - repo_name, commit_hash, target_branch_name, unzipped_location = repo_details + repo_name, commit_hash, target_branch_name, unzipped_location = shapshot_details logger.info("Unpacked {} {} {}".format(repo_name, commit_hash, target_branch_name)) # create project on approved repo if not already there - this func will do that - target_project_id = get_project_id( + target_project_info = get_or_create_project( repo_name, namespace_ids[group_names[1]], gitlab_config["api_url"], gitlab_config["api_token"], ) + target_project_id = target_project_info["id"] + target_project_url = target_project_info["ssh_url_to_repo"] logger.info("Created project {}/{} ".format(group_names[1], repo_name)) # Branch to create on the source (unapproved) repository of the # matches that of the target - src_branch_name = target_branch_name - - # Check if we already have a Merge Request - if so we can just skip to the end - mr_exists = check_if_merge_request_exists( - src_branch_name, - target_project_id, - target_branch_name, - gitlab_config["api_url"], - gitlab_config["api_token"], - ) - if mr_exists: - logger.info( - "Merge Request for {} {} to {} already exists - skipping".format( - repo_name, src_branch_name, target_branch_name - ) - ) - return - - # If we got here, MR doesn't already exist - go through the rest of the steps. 
+ src_branch_name = f"commit-{commit_hash}" # Fork this project to "unapproved" group - src_project_id = fork_project( + src_project_info = fork_project( repo_name, target_project_id, namespace_ids[group_names[0]], gitlab_config["api_url"], gitlab_config["api_token"], ) - - logger.info("Forked to project {}/{}".format(group_names[0], repo_name)) - # Get the remote URL for the unapproved project - remote_url = get_project_remote_url( - repo_name, - namespace_ids[group_names[0]], - gitlab_config["api_url"], - gitlab_config["api_token"], - ) + src_project_id = src_project_info['id'] + remote_url = src_project_info['ssh_url_to_repo'] + logger.info("Fork of project at {}/{}".format(group_names[0], repo_name)) # Do the command-line git stuff to push to unapproved project - - branch_exists = check_if_branch_exists( + clone_commit_and_push( + repo_name, + unzipped_location, + tmp_repo_dir, src_branch_name, - src_project_id, - gitlab_config["api_url"], - gitlab_config["api_token"], + target_branch_name, + remote_url, + target_project_url, + commit_hash, ) - if not branch_exists: - clone_commit_and_push( - repo_name, - unzipped_location, - tmp_repo_dir, - src_branch_name, - remote_url, - commit_hash, - ) - logger.info( - "Pushed to {}/{} branch {}".format( - group_names[0], repo_name, src_branch_name - ) - ) - else: - logger.info( - "{}/{} branch {} already exists".format( - group_names[0], repo_name, src_branch_name - ) - ) # Create the branch on the "approval" project if it doesn't already exist - branch_exists = check_if_branch_exists( + create_branch_if_not_exists( target_branch_name, target_project_id, gitlab_config["api_url"], gitlab_config["api_token"], + "{} / {}".format(group_names[1], repo_name), ## for logging ) - if not branch_exists: - branch_info = create_branch( - target_branch_name, - target_project_id, - gitlab_config["api_url"], - gitlab_config["api_token"], - ) - assert branch_info["name"] == target_branch_name - logger.info( - "{}/{} branch {} 
created".format( - group_names[1], repo_name, target_branch_name - ) - ) - else: - logger.info( - "{}/{} branch {} already exists".format( - group_names[1], repo_name, target_branch_name - ) - ) # Create the merge request - create_merge_request( + create_merge_request_if_not_exists( repo_name, src_project_id, src_branch_name, @@ -517,11 +547,6 @@ def unzipped_repo_to_merge_request( gitlab_config["api_url"], gitlab_config["api_token"], ) - logger.info( - "Created merge request {} -> {}".format(commit_hash, target_branch_name) - ) - - return True def cleanup(zipfile_dir, tmp_unzipped_dir, tmp_repo_dir): @@ -557,7 +582,7 @@ def main(): # unzip the zipfiles, and retrieve a list of tuples describing # (repo_name, commit_hash, desired_branch, unzipped_location) - unzipped_repos = unzip_zipfiles(ZIPFILE_DIR, TMP_UNZIPPED_DIR) + unzipped_snapshots = unzip_zipfiles(ZIPFILE_DIR, TMP_UNZIPPED_DIR) # get the namespace_ids of our "approval" and "unapproved" groups GROUPS = ["unapproved", "approval"] @@ -566,10 +591,10 @@ def main(): ) # loop over all our newly unzipped repositories - for repo_details in unzipped_repos: + for snapshot_details in unzipped_snapshots: # call function to go through all the project/branch/mr creation etc. 
- unzipped_repo_to_merge_request( - repo_details, TMP_REPO_DIR, config, namespace_ids, GROUPS + unzipped_snapshot_to_merge_request( + snapshot_details, TMP_REPO_DIR, config, namespace_ids, GROUPS ) # cleanup From 9416fbafb29ea760ce26565c9a4b5d8d9b8cc1a0 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 5 Jun 2020 15:39:59 +0100 Subject: [PATCH 077/155] Update docstrings (after some functions were renamed) --- .../scripts/zipfile_to_gitlab_project_doc.py | 41 +++++++------------ 1 file changed, 15 insertions(+), 26 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project_doc.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project_doc.py index 48a103c987..baacd56517 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project_doc.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project_doc.py @@ -38,11 +38,6 @@ __.zip """ -get_gitlab_config.__doc__ = """ -Return a dictionary containing the base URL for the gitlab API, -the API token, the IP address, and the headers to go in any request -""" - get_group_namespace_ids.__doc__ = """ Find the namespace_id corresponding to the groups we're interested in, e.g. 'approval' and 'unapproved'. @@ -63,7 +58,6 @@ Parameters ========== -namespace_id: int, ID of the group ("unapproved" or "approval") gitlab_url: str, base URL for the API gitlab_token: str, API token. @@ -88,7 +82,7 @@ bool, True if project exists, False otherwise. """ -get_project_info.__doc__ = """ +get_or_create_project.__doc__ = """ Check if project exists, and if so get its ID. Otherwise, create it and return the ID. @@ -104,24 +98,6 @@ project_info: dict, containing info from the projects API endpoint """ -get_project_id.__doc__ = """ -Given the name of a repository and namespace_id (i.e. 
group, -"unapproved" or "approval"), either return the remote URL for project -matching the repo name, or create it if it doesn't exist already, -and again return the remote URL. - -Parameters -========== -repo_name: str, name of the repository/project we're looking for. -namespace_id: int, the ID of the group ("unapproved" or "approval") -gitlab_url: str, base URL of the API -gitlab_token: str, API key - -Returns -======= -gitlab_project_url: str, the URL to be set as the "remote". -""" - create_project.__doc__ = """ Create empty project on gitlab, and return the corresponding remote URL. @@ -166,12 +142,22 @@ creating a new project or listing existing ones. gitlab_url: str, the base URL for the Gitlab API gitlab_token: str, the Gitlab API token +reference_branch: str, (default "_gitlab_ingress_review"), create the new +branch based on this branch Returns ======= branch_info: dict, info about the branch from API endpoint """ +create_branch_if_not_exists.__doc__ = """ +Idempotent form of `create_branch`. + +Additional argument, (between gitlab_token and reference_branch) +log_project_info: str, prefix to use for logging messages +(most likely the repo name) +""" + check_if_merge_request_exists.__doc__ = """ See if there is an existing merge request between the source and target project/branch combinations. @@ -215,6 +201,9 @@ mr_info: dict, the response from the API upon creating the Merge Request """ +create_merge_request_if_not_exists.__doc__ = """ +Idempotent form of `create_merge_request`. +""" clone_commit_and_push.__doc__ = """ Run shell commands to convert the unzipped directory containing the @@ -248,7 +237,7 @@ new_project_id: int, the id of the newly created 'unapproved/' project """ -unzipped_repo_to_merge_request = """ +unzipped_snapshot_to_merge_request = """ Go through all the steps for a single repo/project. 
Parameters From db81a4db575081ccc645f8c32f2e183dbf8304c5 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Fri, 5 Jun 2020 17:03:39 +0100 Subject: [PATCH 078/155] Rename Gitlab and Hack MD servers GITLAB-INTERNAL-SRE- -> GITLAB- GITLAB-EXTERNAL-SRE- -> GITLAB-REVIEW- HACKMD-SRE- -> HACKMD- "Approval" group on GITLAB-EXTERNAL -> "Approved" group on GITLAB-REVIEW --- ...> SRE_Upload_Git_Repo_to_GitlabReview.ps1} | 6 +- deployment/common/Configuration.psm1 | 38 +++-- ...=> cloud-init-gitlab-review.template.yaml} | 98 ++++++------ ...e.yaml => cloud-init-gitlab.template.yaml} | 4 +- .../scripts/check_merge_requests.py | 94 ++++++----- .../scripts/zipfile_to_gitlab_project.py | 22 +-- .../scripts/zipfile_to_gitlab_project_doc.py | 38 ++--- .../setup/Setup_SRE_WebApp_Servers.ps1 | 150 +++++++++--------- .../full/sre_testasandbox_full_config.json | 44 +++-- 9 files changed, 240 insertions(+), 254 deletions(-) rename deployment/administration/{SRE_Upload_Git_Repo_to_GitlabExternal.ps1 => SRE_Upload_Git_Repo_to_GitlabReview.ps1} (96%) rename deployment/secure_research_environment/cloud_init/{cloud-init-gitlab-external.template.yaml => cloud-init-gitlab-review.template.yaml} (64%) rename deployment/secure_research_environment/cloud_init/{cloud-init-gitlab-internal.template.yaml => cloud-init-gitlab.template.yaml} (84%) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 similarity index 96% rename from deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 rename to deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 index bd3d0d0513..bbf24780f9 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabExternal.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 @@ -54,7 +54,7 @@ Set-Location $workingDir # Upload the zip file to the VM, via blob storage # ----------------------------------------------- 
-$gitlabExternalVmName = $config.sre.webapps.gitlab.external.vmName +$gitlabReviewVmName = $config.sre.webapps.gitlabreview.vmName # Go via blob storage - first create storage account if not already there $storageResourceGroupName = $config.sre.storage.artifacts.rg $sreStorageAccountName = $config.sre.storage.artifacts.accountName @@ -89,8 +89,8 @@ chown -R ${sreAdminUsername}:${sreAdminUsername} /tmp/zipfiles/ "@ $resourceGroupName = $config.sre.webapps.rg -Add-LogMessage -Level Info "[ ] Running remote script to download zipfile onto $gitlabExternalVmName" -$result = Invoke-RemoteScript -Shell "UnixShell" -Script $script -VMName $gitlabExternalVmName -ResourceGroupName $resourceGroupName +Add-LogMessage -Level Info "[ ] Running remote script to download zipfile onto $gitlabReviewVmName" +$result = Invoke-RemoteScript -Shell "UnixShell" -Script $script -VMName $gitlabReviewVmName -ResourceGroupName $resourceGroupName # clean up - remove the zipfile from local machine. Add-LogMessage -Level Info "[ ] Removing original zipfile $zipFilePath" diff --git a/deployment/common/Configuration.psm1 b/deployment/common/Configuration.psm1 index 83cd00d10b..f5bd6e074a 100644 --- a/deployment/common/Configuration.psm1 +++ b/deployment/common/Configuration.psm1 @@ -401,14 +401,14 @@ function Add-SreConfig { gitlabLdapPassword = "$($config.sre.shortName)-gitlab-ldap-password" gitlabRootPassword = "$($config.sre.shortName)-gitlab-root-password" gitlabUserPassword = "$($config.sre.shortName)-gitlab-user-password" - gitlabInternalUsername = "$($config.sre.shortName)-gitlab-internal-username" - gitlabInternalPassword = "$($config.sre.shortName)-gitlab-internal-password" - gitlabInternalAPIToken = "$($config.sre.shortName)-gitlab-internal-api-token" + gitlabUsername = "$($config.sre.shortName)-gitlab-username" + gitlabPassword = "$($config.sre.shortName)-gitlab-password" + gitlabAPIToken = "$($config.sre.shortName)-gitlab-api-token" hackmdLdapPassword = 
"$($config.sre.shortName)-hackmd-ldap-password" hackmdUserPassword = "$($config.sre.shortName)-hackmd-user-password" - gitlabExternalUsername = "$($config.sre.shortName)-gitlab-external-username" - gitlabExternalPassword = "$($config.sre.shortName)-gitlab-external-password" - gitlabExternalAPIToken = "$($config.sre.shortName)-gitlab-external-api-token" + gitlabReviewUsername = "$($config.sre.shortName)-gitlab-review-username" + gitlabReviewPassword = "$($config.sre.shortName)-gitlab-review-password" + gitlabReviewAPIToken = "$($config.sre.shortName)-gitlab-review-api-token" letsEncryptCertificate = "$($config.sre.shortName)-lets-encrypt-certificate" npsSecret = "$($config.sre.shortName)-nps-secret" rdsAdminPassword = "$($config.sre.shortName)-rdsvm-admin-password" @@ -528,26 +528,24 @@ function Add-SreConfig { rg = "RG_SRE_WEBAPPS" nsg = "NSG_SRE_$($config.sre.id)_WEBAPPS".ToUpper() gitlab = [ordered]@{ - internal = [ordered]@{ - vmName = "GITLAB-INTERNAL-SRE-$($config.sre.id)".ToUpper() + vmName = "GITLAB-$($config.sre.id)".ToUpper() vmSize = "Standard_D2s_v3" - } - external = [ordered]@{ - vmName = "GITLAB-EXTERNAL-SRE-$($config.sre.id)".ToUpper() - vmSize = "Standard_D2s_v3" - } + } + gitlabreview = [ordered]@{ + vmName = "GITLAB-REVIEW-$($config.sre.id)".ToUpper() + vmSize = "Standard_D2s_v3" } hackmd = [ordered]@{ - vmName = "HACKMD-SRE-$($config.sre.id)".ToUpper() + vmName = "HACKMD-$($config.sre.id)".ToUpper() vmSize = "Standard_D2s_v3" } } - $config.sre.webapps.gitlab.internal.hostname = $config.sre.webapps.gitlab.internal.vmName - $config.sre.webapps.gitlab.internal.fqdn = "$($config.sre.webapps.gitlab.internal.hostname).$($config.shm.domain.fqdn)" - $config.sre.webapps.gitlab.internal.ip = "$($config.sre.network.subnets.data.prefix).151" - $config.sre.webapps.gitlab.external.hostname = $config.sre.webapps.gitlab.external.vmName - $config.sre.webapps.gitlab.external.fqdn = "$($config.sre.webapps.gitlab.external.hostname).$($config.shm.domain.fqdn)" - 
$config.sre.webapps.gitlab.external.ip = "$($config.sre.network.subnets.airlock.prefix).151" + $config.sre.webapps.gitlab.hostname = $config.sre.webapps.gitlab.vmName + $config.sre.webapps.gitlab.fqdn = "$($config.sre.webapps.gitlab.hostname).$($config.shm.domain.fqdn)" + $config.sre.webapps.gitlab.ip = "$($config.sre.network.subnets.data.prefix).151" + $config.sre.webapps.gitlabreview.hostname = $config.sre.webapps.gitlabreview.vmName + $config.sre.webapps.gitlabreview.fqdn = "$($config.sre.webapps.gitlabreview.hostname).$($config.shm.domain.fqdn)" + $config.sre.webapps.gitlabreview.ip = "$($config.sre.network.subnets.airlock.prefix).151" $config.sre.webapps.hackmd.hostname = $config.sre.webapps.hackmd.vmName $config.sre.webapps.hackmd.fqdn = "$($config.sre.webapps.hackmd.hostname).$($config.shm.domain.fqdn)" $config.sre.webapps.hackmd.ip = "$($config.sre.network.subnets.data.prefix).152" diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml similarity index 64% rename from deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml rename to deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml index 4be96378f1..ca208d675a 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-external.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml @@ -28,22 +28,22 @@ write_files: - path: /etc/gitlab/gitlab.rb permissions: "0600" content: | - external_url 'http://' + external_url 'http://' gitlab_rails['ldap_enabled'] = true gitlab_rails['ldap_servers'] = YAML.load <<-'EOS' main: # 'main' is the GitLab 'provider ID' of this LDAP server label: 'LDAP' - host: '' + host: '' port: 389 uid: 'sAMAccountName' method: 'plain' # "tls" or "ssl" or "plain" - bind_dn: '' - password: '' + bind_dn: '' + 
password: '' active_directory: true allow_username_or_email_login: true block_auto_created_users: false - base: '' - user_filter: '' + base: '' + user_filter: '' attributes: username: ['uid', 'userid', 'sAMAccountName'] email: ['mail', 'email', 'userPrincipalName'] @@ -52,51 +52,51 @@ write_files: last_name: 'sn' EOS git_data_dirs({ "default" => { "path" => "/datadrive/gitdata" } }) - # Secrets for Gitlab Internal and External Access, and script to get them + # Secrets for Gitlab and Review Access, and script to get them - path: "/home//.secrets/gitlab-config.json" permissions: "0600" content: | { - "GITLAB-INTERNAL": { - "ip_address": "", - "username": "", - "user_email": "@", - "api_token": "" + "GITLAB": { + "ip_address": "", + "username": "", + "user_email": "@", + "api_token": "" }, - "GITLAB-EXTERNAL": { - "ip_address": "", - "username": "", - "user_email": "@", - "api_token": "" + "GITLAB-REVIEW": { + "ip_address": "", + "username": "", + "user_email": "@", + "api_token": "" } } - path: "/home//gitlab_config.py" permissions: "0755" content: | - # Script for creating projects and merge requests on gitlab-external + # Script for creating projects and merge requests on gitlab-review - path: "/home//zipfile_to_gitlab_project.py" permissions: "0755" content: | - # Script for monitoring and accepting approval merge requests + # Script for monitoring and accepting merge requests - path: "/home//check_merge_requests.py" permissions: "0755" content: | - # Populate SSH known hosts with keys from gitlab internal + # Populate SSH known hosts with keys from gitlab server - path: "/home//.ssh/known_hosts" permissions: "0600" content: | - + runcmd: # -------------------------------- - # SETUP GITLAB EXTERNAL SERVER + # SETUP GITLAB REVIEW SERVER # -------------------------------- # Configure server - - echo "Configuring gitlab external server" - - echo " " >> /etc/hosts + - echo "Configuring gitlab review server" + - echo " " >> /etc/hosts - echo "Europe/London" > 
/etc/timezone - dpkg-reconfigure -f noninteractive tzdata # Set up the data disk @@ -117,20 +117,20 @@ runcmd: - gitlab-ctl reconfigure # Set root password and don't prompt for it to be reset when web app first loaded - | - echo "user = User.find_by(username: 'root');user.password=user.password_confirmation='';user.password_automatically_set=false;user.save!;exit;" | gitlab-rails console -e production + echo "user = User.find_by(username: 'root');user.password=user.password_confirmation='';user.password_automatically_set=false;user.save!;exit;" | gitlab-rails console -e production # Turn off user account creation - | gitlab-rails runner "ApplicationSetting.last.update_attributes(signup_enabled: false)" # Restrict login to SHM domain (must be done AFTER GitLab update) - | - gitlab-rails runner "ApplicationSetting.last.update_attributes(domain_whitelist: [''])" + gitlab-rails runner "ApplicationSetting.last.update_attributes(domain_whitelist: [''])" # Create user for ingressing external git repos - | - echo "user = User.create(:username => '', :password => '', :password_confirmation => '', :email =>'@', :skip_confirmation => true, :name => '');user.save!;exit;" | gitlab-rails console -e production + echo "user = User.create(:username => '', :password => '', :password_confirmation => '', :email =>'@', :skip_confirmation => true, :name => '');user.save!;exit;" | gitlab-rails console -e production # Create a API token for the ingress user created above - | - token=$(/home//gitlab_config.py --server GITLAB-EXTERNAL --value api_token --file /home//.secrets/gitlab-config.json) - echo "user = User.find_by(username: '');user.personal_access_tokens.create(name: 'apitoken', token_digest: Gitlab::CryptoHelper.sha256('$token'), impersonation: false, scopes: [:api]);exit;" | gitlab-rails console -e production + token=$(/home//gitlab_config.py --server GITLAB-REVIEW --value api_token --file /home//.secrets/gitlab-config.json) + echo "user = User.find_by(username: 
'');user.personal_access_tokens.create(name: 'apitoken', token_digest: Gitlab::CryptoHelper.sha256('$token'), impersonation: false, scopes: [:api]);exit;" | gitlab-rails console -e production # Reload GitLab configuration and restart GitLab - gitlab-ctl reconfigure - gitlab-ctl restart @@ -141,15 +141,15 @@ runcmd: mkdir -p /home//.ssh ssh-keygen -t ed25519 -C 'gitlab' -N '' -f /home//.ssh/id_ed25519 # -------------------------------- - # GITLAB INTERNAL SSH SETUP + # GITLAB SSH SETUP # -------------------------------- - | key=$(cat /home//.ssh/id_ed25519.pub) - token=$(/home//gitlab_config.py --server GITLAB-INTERNAL --value api_token --file /home//.secrets/gitlab-config.json) - curl --header "Authorization: Bearer $token" --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"InternalAPIUser\"}" /api/v4/user/keys - ssh-keyscan -H >> /home//.ssh/known_hosts + token=$(/home//gitlab_config.py --server GITLAB --value api_token --file /home//.secrets/gitlab-config.json) + curl --header "Authorization: Bearer $token" --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"GitlabAPIUser\"}" /api/v4/user/keys + ssh-keyscan -H >> /home//.ssh/known_hosts # -------------------------------- - # WAIT FOR GITLAB EXTERNAL HEALTH CHECK + # WAIT FOR GITLAB REVIEW HEALTH CHECK # -------------------------------- - | attempt_counter=0 @@ -169,31 +169,31 @@ runcmd: done echo # -------------------------------- - # SETUP ACCESS TO GITLAB EXTERNAL + # SETUP ACCESS TO GITLAB REVIEW # -------------------------------- - - echo "Configuring access to gitlab external" - # Create SSH key for gitlab external access + - echo "Configuring access to gitlab review" + # Create SSH key for gitlab review access - | key=$(cat /home//.ssh/id_ed25519.pub); - token=$(/home//gitlab_config.py --server GITLAB-EXTERNAL --value api_token --file /home//.secrets/gitlab-config.json) - curl --header "Authorization: Bearer $token" --header 
'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"ExternalAPIUser\"}" /api/v4/user/keys - # Get local ssh host keys, add them to known hosts under the gitlab external ip + token=$(/home//gitlab_config.py --server GITLAB-REVIEW --value api_token --file /home//.secrets/gitlab-config.json) + curl --header "Authorization: Bearer $token" --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"ReviewAPIUser\"}" /api/v4/user/keys + # Get local ssh host keys, add them to known hosts under the gitlab review ip - | - echo " $(cat /etc/ssh/ssh_host_rsa_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts - echo " $(cat /etc/ssh/ssh_host_ed25519_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts - echo " $(cat /etc/ssh/ssh_host_ecdsa_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts - # Create groups for storing unapproved and approval repos + echo " $(cat /etc/ssh/ssh_host_rsa_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts + echo " $(cat /etc/ssh/ssh_host_ed25519_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts + echo " $(cat /etc/ssh/ssh_host_ecdsa_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts + # Create groups for storing unapproved and approved repos - | - token=$(/home//gitlab_config.py --server GITLAB-EXTERNAL --value api_token --file /home//.secrets/gitlab-config.json) - curl --header "Authorization: Bearer $token" --data "name=approval&path=approval&visibility=internal" /api/v4/groups - curl --header "Authorization: Bearer $token" --data "name=unapproved&path=unapproved&visibility=internal" /api/v4/groups + token=$(/home//gitlab_config.py --server GITLAB-REVIEW --value api_token --file /home//.secrets/gitlab-config.json) + curl --header "Authorization: Bearer $token" --data "name=approved&path=approved&visibility=internal" /api/v4/groups + curl --header "Authorization: Bearer $token" --data "name=unapproved&path=unapproved&visibility=internal" /api/v4/groups # 
-------------------------------- # GIT SETUP # -------------------------------- - | echo "Configuring git" - HOME=/home/ git config --global user.name '' - HOME=/home/ git config --global user.email '@' + HOME=/home/ git config --global user.name '' + HOME=/home/ git config --global user.email '@' # -------------------------------- # ADD CRONTAB ENTRIES FOR GITLAB SCRIPTS # -------------------------------- diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml similarity index 84% rename from deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml rename to deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml index d1dc52fa9b..8e13dd69b0 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-internal.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml @@ -84,10 +84,10 @@ runcmd: gitlab-rails runner "ApplicationSetting.last.update_attributes(domain_whitelist: [''])" # Create user for ingressing external git repos - | - echo "user = User.create(:username => '', :password => '', :password_confirmation => '', :email =>'@', :skip_confirmation => true, :name => '');user.save!;exit;" | gitlab-rails console -e production + echo "user = User.create(:username => '', :password => '', :password_confirmation => '', :email =>'@', :skip_confirmation => true, :name => '');user.save!;exit;" | gitlab-rails console -e production # Create a API token for the ingress user created above - | - echo "user = User.find_by(username: '');user.personal_access_tokens.create(name: 'apitoken', token_digest: Gitlab::CryptoHelper.sha256(''), impersonation: false, scopes: [:api]);exit;" | gitlab-rails console -e production + echo "user = User.find_by(username: '');user.personal_access_tokens.create(name: 'apitoken', token_digest: 
Gitlab::CryptoHelper.sha256(''), impersonation: false, scopes: [:api]);exit;" | gitlab-rails console -e production # Reload GitLab configuration and restart GitLab - gitlab-ctl reconfigure - gitlab-ctl restart diff --git a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py index 8936490a43..19a69bc9f0 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py +++ b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py @@ -1,15 +1,15 @@ #!/usr/bin/env python3 """ -Check merge requests on gitlab external, approve them where appropriate, -and push the approved repos to gitlab internal. +Check merge requests on gitlab review, approve them where appropriate, +and push the approved repos to gitlab . -1) Get open merge requests in the approval group on gitlab external. +1) Get open merge requests in the approved group on gitlab review. 2) Check whether any of them meet the approval conditions. By default: status is can be merged, not flagged as work in progress, no unresolved discussions, at least two upvotes, and no downvotes. -3) Accept approved merge requests (merged unapproved repo into approval repo). -4) Push whole approval repo to gitlab internal, creating the repo if it doesn't +3) Accept approved merge requests (merged unapproved repo into approved repo). +4) Push whole approved repo to gitlab , creating the repo if it doesn't already exist. This script creates two log files in the same directory that it is run from: @@ -43,24 +43,22 @@ logger.addHandler(c_handler) -def internal_project_exists(repo_name, config): +def check_project_exists(repo_name, config): """Determine whether a repo exist in the ingress namespace on - GITLAB-INTERNAL. + gitlab server defined by config. 
Parameters ---------- repo_name : str - The name of a repo (not a URL) to search for in the ingress namespace - on GITLAB-INTERNAL. + The name of a repo (not a URL) to search for in the ingress namespace. config : dict - GITLAB-INTERNAL details and secrets as returned by get_gitlab_config + Gitlab details and secrets as returned by get_gitlab_config Returns ------- tuple - (exists, url) tuple where exists: boolean - does repo_name exist on - GITLAB-INTERNAL?, and url: str - the ssh url to the repo (when 'exists' - is true) + (exists, url) tuple where exists: boolean - does repo_name exist, + and url: str - the ssh url to the repo (when 'exists' is true) Raises ------ @@ -71,7 +69,7 @@ def internal_project_exists(repo_name, config): # build url-encoded repo_name repo_path_encoded = url_quote("ingress/" + repo_name, safe="") - # Does repo_name exist on GITLAB-INTERNAL? + # Does repo_name exist? response = requests.get( config["api_url"] + "/projects/" + repo_path_encoded, headers=config["headers"] ) @@ -89,30 +87,30 @@ def internal_project_exists(repo_name, config): ) -def internal_update_repo(git_url, repo_name, branch_name, config): +def update_repo(git_url, repo_name, branch_name, config): """Takes a git URL, `git_url`, which should be the SSH URL to the - "APPROVED" repo on GITLAB-EXTERNAL, clones it and pushes all branches to - the repo `repo_name` owned by 'ingress' on GITLAB-INTERNAL, creating it - there first if it doesn't exist. + "APPROVED" repo on GITLAB-REVIEW, clones it and pushes all branches to + the repo `repo_name` owned by 'ingress' on the gitlab server defined in + config, creating it there first if it doesn't exist. Parameters ---------- git_url : str - URL to the "APPROVED" repo on GITLAB-EXTERNAL + URL to the "APPROVED" repo on GITLAB-REVIEW repo_name : str - Name of repo to create on GITLAB-INTERNAL. + Name of repo to create on. 
config : dict - GITLAB-INTERNAL details and secrets as returned by get_gitlab_config + Details and secrets as returned by get_gitlab_config """ - # clone the repo from git_url (on GITLAB-EXTERNAL), removing any of + # clone the repo from git_url (on GITLAB-REVIEW), removing any of # the same name first (simpler than checking if it exists, has the # same remote and pulling) subprocess.run(["rm", "-rf", repo_name], check=True) subprocess.run(["git", "clone", git_url, repo_name], check=True) subprocess.run(["git", "checkout", branch_name], cwd=repo_name, check=True) - project_exists, gl_internal_repo_url = internal_project_exists(repo_name, config) + project_exists, gl_update_repo_url = check_project_exists(repo_name, config) # create the project if it doesn't exist if not project_exists: @@ -125,18 +123,18 @@ def internal_update_repo(git_url, repo_name, branch_name, config): response.raise_for_status() assert response.json()["path_with_namespace"] == "ingress/" + repo_name - gl_internal_repo_url = response.json()["ssh_url_to_repo"] + gl_update_repo_url = response.json()["ssh_url_to_repo"] # Set the remote subprocess.run( - ["git", "remote", "add", "gitlab-internal", gl_internal_repo_url], + ["git", "remote", "add", "gitlab", gl_update_repo_url], cwd=repo_name, check=True, ) # Force push current contents of all branches subprocess.run( - ["git", "push", "--force", "gitlab-internal"], cwd=repo_name, check=True + ["git", "push", "--force", "gitlab"], cwd=repo_name, check=True ) @@ -253,7 +251,7 @@ def get_project(project_id, config): def get_merge_requests_for_approval(config): - """Get the details of all open merge requests into the approval group on + """Get the details of all open merge requests into the approved group on a gitlab server. Parameters @@ -266,7 +264,7 @@ def get_merge_requests_for_approval(config): list List of merge requests JSONs as returned by the gitlab API. 
""" - group = get_group_id("approval", config) + group = get_group_id("approved", config) endpoint = config["api_url"] + f"/groups/{group}/merge_requests" response = get_request( endpoint, @@ -339,37 +337,37 @@ def accept_merge_request(mr, config): def check_merge_requests(): - """Main function to check merge requests in the approval group on gitlab external, - approve them where appropriate, and then push the approved repos to gitlab - internal. + """Main function to check merge requests in the approved group on gitlab review, + approve them where appropriate, and then push the approved repos to the normal + gitlab server for users.. """ logger.info(f"STARTING RUN") try: - config_external = get_api_config(server="GITLAB-EXTERNAL") - config_internal = get_api_config(server="GITLAB-INTERNAL") + config_gitlabreview = get_api_config(server="GITLAB-REVIEW") + config_gitlab = get_api_config(server="GITLAB") except Exception as e: logger.critical(f"Failed to load gitlab secrets: {e}") return try: - internal_status = requests.get( - config_internal["api_url"] + "/projects", - headers=config_internal["headers"], + gitlab_status = requests.get( + config_gitlab["api_url"] + "/projects", + headers=config_gitlab["headers"], timeout=10, ) - if not internal_status.ok: + if not gitlab_status.ok: logger.critical( - f"Gitlab Internal Not Responding: {internal_status.status_code}, CONTENT {internal_status.content}" + f"Gitlab Not Responding: {gitlab_status.status_code}, CONTENT {gitlab_status.content}" ) return except Exception as e: - logger.critical(f"Gitlab Internal Not Responding: {e}") + logger.critical(f"Gitlab Not Responding: {e}") return logger.info("Getting open merge requests for approval") try: - merge_requests = get_merge_requests_for_approval(config_external) + merge_requests = get_merge_requests_for_approval(config_gitlabreview) except Exception as e: logger.critical(f"Failed to get merge requests: {e}") return @@ -380,10 +378,10 @@ def check_merge_requests(): 
logger.info(f"Merge request {i+1} out of {len(merge_requests)}") try: # Extract merge request details - source_project = get_project(mr["source_project_id"], config_external) + source_project = get_project(mr["source_project_id"], config_gitlabreview) logger.info(f"Source Project: {source_project['name_with_namespace']}") logger.info(f"Source Branch: {mr['source_branch']}") - target_project = get_project(mr["project_id"], config_external) + target_project = get_project(mr["project_id"], config_gitlabreview) logger.info(f"Target Project: {target_project['name_with_namespace']}") target_branch = mr["target_branch"] logger.info(f"Target Branch: {target_branch}") @@ -398,7 +396,7 @@ def check_merge_requests(): logger.info(f"Merge Status: {status}") wip = mr["work_in_progress"] logger.info(f"Work in Progress: {wip}") - unresolved = count_unresolved_mr_discussions(mr, config_external) + unresolved = count_unresolved_mr_discussions(mr, config_gitlabreview) logger.info(f"Unresolved Discussions: {unresolved}") upvotes = mr["upvotes"] logger.info(f"Upvotes: {upvotes}") @@ -416,7 +414,7 @@ def check_merge_requests(): ): logger.info("Merge request has been approved. Proceeding with merge.") try: - result = accept_merge_request(mr, config_external) + result = accept_merge_request(mr, config_gitlabreview) except Exception as e: logger.error(f"Merge failed! {e}") continue @@ -431,15 +429,15 @@ def check_merge_requests(): except Exception as e: logger.error(f"Failed to log accepted merge request: {e}") try: - logger.info("Pushing project to gitlab internal.") - internal_update_repo( + logger.info("Pushing project to gitlab user server.") + update_repo( target_project["ssh_url_to_repo"], target_project["name"], target_branch, - config_internal, + config_gitlab, ) except Exception as e: - logger.error(f"Failed to push to internal: {e}") + logger.error(f"Failed to push to gitlab user server: {e}") else: logger.error(f"Merge failed! 
Merge status is {result['state']}") else: diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 5757ee9d5a..02f08399ba 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -61,7 +61,7 @@ def unzip_zipfiles(zipfile_dir, tmp_unzipped_dir): def get_group_namespace_ids( - gitlab_url, gitlab_token, groups=["approval", "unapproved"] + gitlab_url, gitlab_token, groups=["approved", "unapproved"] ): namespaces_url = "{}/namespaces/".format(gitlab_url) response = requests.get( @@ -154,7 +154,7 @@ def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): ## For Reviewers -There is a merge request into this repository (`approval/{repo_name}`) +There is a merge request into this repository (`approved/{repo_name}`) for each ingress request. 
Please look at each merge request in turn, and review it using the @@ -408,14 +408,14 @@ def clone_commit_and_push( assert os.path.exists(working_dir) # Add upstream (target repo) to this repo - subprocess.run(["git", "remote", "add", "approval", target_project_url], cwd=working_dir, check=True) - subprocess.run(["git", "fetch", "approval"], cwd=working_dir, check=True) + subprocess.run(["git", "remote", "add", "approved", target_project_url], cwd=working_dir, check=True) + subprocess.run(["git", "fetch", "approved"], cwd=working_dir, check=True) # Checkout the branch with the requested name (creating it at the # current commit of the default branch if it doesn't exist) git_checkout_result = subprocess.run(["git", "checkout", target_branch_name], cwd=working_dir) if git_checkout_result.returncode == 0: - subprocess.run(["git", "pull", "approval"], cwd=working_dir, check=True) + subprocess.run(["git", "pull", "approved"], cwd=working_dir, check=True) # now checkout the branch holding the snapshot subprocess.run(["git", "checkout", "-b", branch_name], cwd=working_dir, check=True) @@ -437,7 +437,7 @@ def clone_commit_and_push( subprocess.run(["git", "add", "."], cwd=working_dir, check=True) commit_msg = "Import snapshot of {} at commit {}".format(remote_url, commit_hash) subprocess.run(["git", "commit", "-m", commit_msg], cwd=working_dir, check=True) - # Push back to gitlab external (unapproved) + # Push back to gitlab review (unapproved) subprocess.run( ["git", "push", "-f", "--set-upstream", "origin", branch_name], cwd=working_dir, @@ -462,7 +462,7 @@ def fork_project(repo_name, project_id, namespace_id, gitlab_url, gitlab_token): raise RuntimeError("Problem creating fork: {}".format(response.content)) new_project_info = response.json()#["id"] else: - # project already exists - ensure it is a fork of 'approval/' + # project already exists - ensure it is a fork of 'approved/' new_project_info = get_or_create_project( repo_name, namespace_id, gitlab_url, gitlab_token 
) @@ -528,7 +528,7 @@ def unzipped_snapshot_to_merge_request( commit_hash, ) - # Create the branch on the "approval" project if it doesn't already exist + # Create the branch on the "approved" project if it doesn't already exist create_branch_if_not_exists( target_branch_name, target_project_id, @@ -578,14 +578,14 @@ def main(): os.makedirs(TMP_REPO_DIR) # get the gitlab config - config = get_api_config("GITLAB-EXTERNAL") + config = get_api_config("GITLAB-REVIEW") # unzip the zipfiles, and retrieve a list of tuples describing # (repo_name, commit_hash, desired_branch, unzipped_location) unzipped_snapshots = unzip_zipfiles(ZIPFILE_DIR, TMP_UNZIPPED_DIR) - # get the namespace_ids of our "approval" and "unapproved" groups - GROUPS = ["unapproved", "approval"] + # get the namespace_ids of our "approved" and "unapproved" groups + GROUPS = ["unapproved", "approved"] namespace_ids = get_group_namespace_ids( config["api_url"], config["api_token"], GROUPS ) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project_doc.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project_doc.py index 48a103c987..829039cb16 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project_doc.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project_doc.py @@ -9,17 +9,17 @@ 1) get useful gitlab stuff (url, api key, namespace_ids for our groups) 2) unzip zipfiles in specified directory 3) loop over unzipped repos. For each one: - a) see if "approval" project with same name exists, if not, create it - b) check if merge request to "approval/" with source and target branches + a) see if "approved" project with same name exists, if not, create it + b) check if merge request to "approved/" with source and target branches "commit-" and "" already exists. If so, skip to the next unzipped repo. 
- b) see if "unapproved" project with same name exists, if not, fork "approval" one + b) see if "unapproved" project with same name exists, if not, fork "approved" one c) clone "unapproved" project, and create branch called "commit-" d) copy in contents of unzipped repo. e) git add, commit and push to "unapproved" project - f) create branch "" on "approval" project + f) create branch "" on "approved" project g) create merge request from unapproved/repo_name/commit_hash to - approval/repo_name/desired_branch_name + approved/repo_name/desired_branch_name 4) clean up - remove zipfiles and unpacked repos. """ @@ -45,7 +45,7 @@ get_group_namespace_ids.__doc__ = """ Find the namespace_id corresponding to the groups we're interested in, -e.g. 'approval' and 'unapproved'. +e.g. 'approved' and 'unapproved'. Parameters ========== @@ -63,7 +63,7 @@ Parameters ========== -namespace_id: int, ID of the group ("unapproved" or "approval") +namespace_id: int, ID of the group ("unapproved" or "approved") gitlab_url: str, base URL for the API gitlab_token: str, API token. @@ -79,7 +79,7 @@ Parameters ========== repo_name: str, name of our repository/project -namespace_id: int, id of our group ("unapproved" or "approval") +namespace_id: int, id of our group ("unapproved" or "approved") gitlab_url: str, base URL of Gitlab API gitlab_token: str, API key for Gitlab API. @@ -95,7 +95,7 @@ Parameters ========== repo_name: str, name of our repository/project -namespace_id: int, id of our group ("unapproved" or "approval") +namespace_id: int, id of our group ("unapproved" or "approved") gitlab_url: str, base URL of Gitlab API gitlab_token: str, API key for Gitlab API. @@ -106,14 +106,14 @@ get_project_id.__doc__ = """ Given the name of a repository and namespace_id (i.e. 
group, -"unapproved" or "approval"), either return the remote URL for project +"unapproved" or "approved"), either return the remote URL for project matching the repo name, or create it if it doesn't exist already, and again return the remote URL. Parameters ========== repo_name: str, name of the repository/project we're looking for. -namespace_id: int, the ID of the group ("unapproved" or "approval") +namespace_id: int, the ID of the group ("unapproved" or "approved") gitlab_url: str, base URL of the API gitlab_token: str, API key @@ -180,7 +180,7 @@ ========== source_branch: str, name of the branch on source project, will typically be the commit_hash from the original repo. -target_project_id: int, project_id for the "approval" group's project. +target_project_id: int, project_id for the "approved" group's project. target_branch: str, name of branch on target project, will typically be the desired branch name. gitlab_url: str, base URL for the Gitlab API @@ -193,7 +193,7 @@ create_merge_request.__doc__ = """ Create a new MR, e.g. from the branch in the "unapproved" -group's project, to the branch in the "approval" +group's project, to the branch in the "approved" group's project. Parameters @@ -204,7 +204,7 @@ projects API endpoint. source_branch: str, name of the branch on source project, will typically be the 'branch-'. -target_project_id: int, project_id for the "approval" group's project. +target_project_id: int, project_id for the "approved" group's project. target_branch: str, name of branch on target project, will typically be the desired branch name. 
gitlab_url: str, base URL for the Gitlab API @@ -227,18 +227,18 @@ path_to_unzipped_repo: str, the full directory path to the unzipped repo tmp_repo_dir: str, path to a temporary dir where we will clone the project branch_name: str, the name of the branch to push to -remote_url: str, the URL for this project on gitlab-external to be added +remote_url: str, the URL for this project on gitlab-review to be added as a "remote". """ fork_project.__doc__ = """ -Fork the project 'approval/' to 'unapproved/' +Fork the project 'approved/' to 'unapproved/' after first checking whether the latter exists. Parameters ========== repo_name: str, name of the repo/project -project_id: int, project id of the 'approval/' project +project_id: int, project id of the 'approved/' project namespace_id: int, id of the 'unapproved' namespace gitlab_url: str, str, the base URL of Gitlab API gitlab_token: str, API token for Gitlab API @@ -256,9 +256,9 @@ repo_details: tuple of strings, (repo_name, hash, desired_branch, location) tmp_repo_dir: str, directory where we will clone the repo, then copy the contents in gitlab_config: dict, contains api url and token -namespace_ids; dict, keys are the group names (e.g. "unapproved", "approval", values +namespace_ids; dict, keys are the group names (e.g. 
"unapproved", "approved", values are the ids of the corresponding namespaces in Gitlab -group_names: list of strings, typically ["unapproved", "approval"] +group_names: list of strings, typically ["unapproved", "approved"] """ cleanup.__doc__ = """ diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index f95d049f02..9991b4693b 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -25,20 +25,20 @@ $sreAdminPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name $gitlabRootPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabRootPassword $gitlabUserPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabUserPassword $gitlabLdapPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabLdapPassword -$gitlabExternalUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabExternalUsername -DefaultValue "ingress" -$gitlabExternalPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabExternalPassword -$gitlabExternalAPIToken = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabExternalAPIToken +$gitlabReviewUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabReviewUsername -DefaultValue "ingress" +$gitlabReviewPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabReviewPassword +$gitlabReviewAPIToken = 
Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabReviewAPIToken $hackmdUserPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.hackmdUserPassword $hackmdLdapPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.hackmdLdapPassword -$gitlabInternalUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabInternalUsername -DefaultValue "ingress" -$gitlabInternalPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabInternalPassword -$gitlabInternalAPIToken = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabInternalAPIToken +$gitlabUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabUsername -DefaultValue "ingress" +$gitlabPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabPassword +$gitlabAPIToken = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabAPIToken # Set up the NSG for the webapps # ------------------------------ -$nsgGitlabInternal = Deploy-NetworkSecurityGroup -Name $config.sre.webapps.nsg -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgGitlabInternal ` +$nsgGitlab = Deploy-NetworkSecurityGroup -Name $config.sre.webapps.nsg -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location +Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgGitlab ` -Name "OutboundDenyInternet" ` -Description "Outbound deny internet" ` -Priority 4000 ` @@ -47,15 +47,7 @@ Add-NetworkSecurityGroupRule 
-NetworkSecurityGroup $nsgGitlabInternal ` -DestinationAddressPrefix Internet -DestinationPortRange * -$nsgGitlabExternal = Deploy-NetworkSecurityGroup -Name $config.sre.network.nsg.airlock.name -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location -# TODO Removed for development testing -# Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgGitlabExternal ` -# -Name "InboundDenyAll" ` -# -Description "Inbound deny everything" ` -# -Priority 4000 ` -# -Direction Inbound -Access Deny -Protocol * ` -# -SourceAddressPrefix * -SourcePortRange * ` -# -DestinationAddressPrefix * -DestinationPortRange * +$nsgGitlabReview = Deploy-NetworkSecurityGroup -Name $config.sre.network.nsg.airlock.name -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location # Check that VNET and subnet exist @@ -64,29 +56,29 @@ $nsgGitlabExternal = Deploy-NetworkSecurityGroup -Name $config.sre.network.nsg.a $vnet = Deploy-VirtualNetwork -Name $config.sre.network.vnet.Name -ResourceGroupName $config.sre.network.vnet.rg -AddressPrefix $config.sre.network.vnet.cidr -Location $config.sre.location $subnet = Deploy-Subnet -Name $config.sre.network.subnets.airlock.Name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.airlock.cidr -Set-SubnetNetworkSecurityGroup -Subnet $subnet -NetworkSecurityGroup $nsgGitlabExternal -VirtualNetwork $vnet +Set-SubnetNetworkSecurityGroup -Subnet $subnet -NetworkSecurityGroup $nsgGitlabReview -VirtualNetwork $vnet -# Expand GitLab internal cloudinit +# Expand GitLab cloudinit # -------------------------------- $shmDcFqdn = ($config.shm.dc.hostname + "." + $config.shm.domain.fqdn) -$gitlabFqdn = $config.sre.webapps.gitlab.internal.hostname + "." + $config.sre.domain.fqdn +$gitlabFqdn = $config.sre.webapps.gitlab.hostname + "." 
+ $config.sre.domain.fqdn $gitlabLdapUserDn = "CN=" + $config.sre.users.ldap.gitlab.name + "," + $config.shm.domain.serviceOuPath $gitlabUserFilter = "(&(objectClass=user)(memberOf=CN=" + $config.sre.domain.securityGroups.researchUsers.name + "," + $config.shm.domain.securityOuPath + "))" -$gitlabCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab-internal.template.yaml" | Get-Item | Get-Content -Raw +$gitlabCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab.template.yaml" | Get-Item | Get-Content -Raw $gitlabCloudInit = $gitlabCloudInitTemplate.Replace('', $shmDcFqdn). Replace('', $gitlabLdapUserDn). Replace('',$gitlabLdapPassword). Replace('',$config.shm.domain.userOuPath). Replace('',$gitlabUserFilter). - Replace('',$config.sre.webapps.gitlab.internal.ip). - Replace('',$config.sre.webapps.gitlab.internal.hostname). + Replace('',$config.sre.webapps.gitlab.ip). + Replace('',$config.sre.webapps.gitlab.hostname). Replace('',$gitlabFqdn). Replace('',$gitlabRootPassword). Replace('',$config.shm.domain.fqdn). - Replace('',$gitlabInternalUsername). - Replace('',$gitlabInternalPassword). - Replace('',$gitlabInternalAPIToken) + Replace('',$gitlabUsername). + Replace('',$gitlabPassword). 
+ Replace('',$gitlabAPIToken) # Encode as base64 $gitlabCloudInitEncoded = [System.Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($gitlabCloudInit)) @@ -125,9 +117,9 @@ $params = @{ Administrator_User = $sreAdminUsername BootDiagnostics_Account_Name = $config.sre.storage.bootdiagnostics.accountName GitLab_Cloud_Init = $gitlabCloudInitEncoded - GitLab_IP_Address = $config.sre.webapps.gitlab.internal.ip - GitLab_Server_Name = $config.sre.webapps.gitlab.internal.vmName - GitLab_VM_Size = $config.sre.webapps.gitlab.internal.vmSize + GitLab_IP_Address = $config.sre.webapps.gitlab.ip + GitLab_Server_Name = $config.sre.webapps.gitlab.vmName + GitLab_VM_Size = $config.sre.webapps.gitlab.vmSize HackMD_Cloud_Init = $hackmdCloudInitEncoded HackMD_IP_Address = $config.sre.webapps.hackmd.ip HackMD_Server_Name = $config.sre.webapps.hackmd.vmName @@ -143,31 +135,31 @@ Deploy-ArmTemplate -TemplatePath (Join-Path $PSScriptRoot ".." "arm_templates" " # ----------------------------------------------- Add-LogMessage -Level Info "Waiting for cloud-init provisioning to finish (this will take 5+ minutes)..." 
$progress = 0 -$gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.internal.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code +$gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code $hackmdStatuses = (Get-AzVM -Name $config.sre.webapps.hackmd.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code while (-Not ($gitlabStatuses.Contains("ProvisioningState/succeeded") -and $gitlabStatuses.Contains("PowerState/stopped") -and $hackmdStatuses.Contains("ProvisioningState/succeeded") -and $hackmdStatuses.Contains("PowerState/stopped"))) { $progress = [math]::min(100, $progress + 1) - $gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.internal.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code + $gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code $hackmdStatuses = (Get-AzVM -Name $config.sre.webapps.hackmd.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code - Write-Progress -Activity "Deployment status:" -Status "GitLab Internal [$($gitlabStatuses[0]) $($gitlabStatuses[1])], HackMD [$($hackmdStatuses[0]) $($hackmdStatuses[1])]" -PercentComplete $progress + Write-Progress -Activity "Deployment status:" -Status "GitLab [$($gitlabStatuses[0]) $($gitlabStatuses[1])], HackMD [$($hackmdStatuses[0]) $($hackmdStatuses[1])]" -PercentComplete $progress Start-Sleep 10 } # While webapp servers are off, ensure they are bound to correct NSG # ------------------------------------------------------------------ Add-LogMessage -Level Info "Ensure webapp servers and compute VMs are bound to correct NSG..." 
-foreach ($vmName in ($config.sre.webapps.hackmd.vmName, $config.sre.webapps.gitlab.internal.vmName)) { - Add-VmToNSG -VMName $vmName -NSGName $nsgGitlabInternal.Name +foreach ($vmName in ($config.sre.webapps.hackmd.vmName, $config.sre.webapps.gitlab.vmName)) { + Add-VmToNSG -VMName $vmName -NSGName $nsgGitlab.Name } Start-Sleep -Seconds 30 -Add-LogMessage -Level Info "Summary: NICs associated with '$($nsgGitlabInternal.Name)' NSG" -@($nsgGitlabInternal.NetworkInterfaces) | ForEach-Object { Add-LogMessage -Level Info "=> $($_.Id.Split('/')[-1])" } +Add-LogMessage -Level Info "Summary: NICs associated with '$($nsgGitlab.Name)' NSG" +@($nsgGitlab.NetworkInterfaces) | ForEach-Object { Add-LogMessage -Level Info "=> $($_.Id.Split('/')[-1])" } # Finally, reboot the webapp servers # ---------------------------------- -foreach ($nameVMNameParamsPair in (("HackMD", $config.sre.webapps.hackmd.vmName), ("GitLab", $config.sre.webapps.gitlab.internal.vmName))) { +foreach ($nameVMNameParamsPair in (("HackMD", $config.sre.webapps.hackmd.vmName), ("GitLab", $config.sre.webapps.gitlab.vmName))) { $name, $vmName = $nameVMNameParamsPair Add-LogMessage -Level Info "Rebooting the $name VM: '$vmName'" Enable-AzVM -Name $vmName -ResourceGroupName $config.sre.webapps.rg @@ -178,44 +170,44 @@ foreach ($nameVMNameParamsPair in (("HackMD", $config.sre.webapps.hackmd.vmName) } } -# Deploy NIC and data disks for gitlab.external +# Deploy NIC and data disks for gitlab review # --------------------------------------------- -$vmName = $config.sre.webapps.gitlab.external.vmName -$vmIpAddress = $config.sre.webapps.gitlab.external.ip +$vmName = $config.sre.webapps.gitlabreview.vmName +$vmIpAddress = $config.sre.webapps.gitlabreview.ip $vmNic = Deploy-VirtualMachineNIC -Name "$vmName-NIC" -ResourceGroupName $config.sre.webapps.rg -Subnet $subnet -PrivateIpAddress $vmIpAddress -Location $config.sre.location -# Deploy the GitLab external VM +# Deploy the GitLab review VM # 
------------------------------ $bootDiagnosticsAccount = Deploy-StorageAccount -Name $config.sre.storage.bootdiagnostics.accountName -ResourceGroupName $config.sre.storage.bootdiagnostics.rg -Location $config.sre.location $shmDcFqdn = ($config.shm.dc.hostname + "." + $config.shm.domain.fqdn) -$gitlabFqdn = $config.sre.webapps.gitlab.external.hostname + "." + $config.sre.domain.fqdn +$gitlabFqdn = $config.sre.webapps.gitlabreview.hostname + "." + $config.sre.domain.fqdn $gitlabLdapUserDn = "CN=" + $config.sre.users.ldap.gitlab.name + "," + $config.shm.domain.serviceOuPath $gitlabUserFilter = "(&(objectClass=user)(memberOf=CN=" + $config.sre.domain.securityGroups.reviewUsers.name + "," + $config.shm.domain.securityOuPath + "))" -$gitlabExternalCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab-external.template.yaml" | Get-Item | Get-Content -Raw +$gitlabReviewCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab-review.template.yaml" | Get-Item | Get-Content -Raw -# Get public SSH keys from gitlab internal (so it can be added as a known host on gitlab external) +# Get public SSH keys from gitlab (so it can be added as a known host on gitlab review) # ------------------------------ $script = ' #! 
/bin/bash -echo " $(cat /etc/ssh/ssh_host_rsa_key.pub | cut -d " " -f -2)" -echo " $(cat /etc/ssh/ssh_host_ed25519_key.pub | cut -d " " -f -2)" -echo " $(cat /etc/ssh/ssh_host_ecdsa_key.pub | cut -d " " -f -2)" -'.Replace('', $config.sre.webapps.gitlab.internal.ip) -$vmNameInternal = $config.sre.webapps.gitlab.internal.vmName -$result = Invoke-RemoteScript -VMName $vmNameInternal -ResourceGroupName $config.sre.webapps.rg -Shell "UnixShell" -Script $script -Add-LogMessage -Level Success "Fetching ssh keys from gitlab internal succeeded" +echo " $(cat /etc/ssh/ssh_host_rsa_key.pub | cut -d " " -f -2)" +echo " $(cat /etc/ssh/ssh_host_ed25519_key.pub | cut -d " " -f -2)" +echo " $(cat /etc/ssh/ssh_host_ecdsa_key.pub | cut -d " " -f -2)" +'.Replace('', $config.sre.webapps.gitlab.ip) +$vmName = $config.sre.webapps.gitlab.vmName +$result = Invoke-RemoteScript -VMName $vmName -ResourceGroupName $config.sre.webapps.rg -Shell "UnixShell" -Script $script +Add-LogMessage -Level Success "Fetching ssh keys from gitlab succeeded" # Extract everything in between the [stdout] and [stderr] blocks of the result message. i.e. all output of the script. 
-$internalSshKeys = $result.Value[0].Message | Select-String "\[stdout\]\s*([\s\S]*?)\s*\[stderr\]" -$internalSshKeys = $internalSshKeys.Matches.Groups[1].Value +$sshKeys = $result.Value[0].Message | Select-String "\[stdout\]\s*([\s\S]*?)\s*\[stderr\]" +$sshKeys = $sshKeys.Matches.Groups[1].Value # Insert keys into cloud init template, maintaining indentation $indent = " " -$indented_internalSshKeys = $internalSshKeys -split "`n" | ForEach-Object { "${indent}$_" } | Join-String -Separator "`n" -$gitlabExternalCloudInitTemplate = $gitlabExternalCloudInitTemplate.Replace("${indent}", $indented_internalSshKeys) +$indented_sshKeys = $sshKeys -split "`n" | ForEach-Object { "${indent}$_" } | Join-String -Separator "`n" +$gitlabReviewCloudInitTemplate = $gitlabReviewCloudInitTemplate.Replace("${indent}", $indented_sshKeys) # Insert scripts into the cloud-init template @@ -226,36 +218,36 @@ foreach ($scriptName in @("zipfile_to_gitlab_project.py", "gitlab_config.py")) { $raw_script = Get-Content (Join-Path $PSScriptRoot ".." "cloud_init" "scripts" $scriptName) -Raw $indented_script = $raw_script -split "`n" | ForEach-Object { "${indent}$_" } | Join-String -Separator "`n" - $gitlabExternalCloudInitTemplate = $gitlabExternalCloudInitTemplate.Replace("${indent}<$scriptName>", $indented_script) + $gitlabReviewCloudInitTemplate = $gitlabReviewCloudInitTemplate.Replace("${indent}<$scriptName>", $indented_script) } -$gitlabExternalCloudInit = $gitlabExternalCloudInitTemplate.Replace('',$sreAdminUsername). - Replace('',$config.sre.webapps.gitlab.internal.ip). - Replace('',$config.shm.domain.fqdn). - Replace('',$gitlabInternalUsername). - Replace('',$gitlabInternalAPIToken). - Replace('', $shmDcFqdn). - Replace('', $gitlabLdapUserDn). - Replace('',$gitlabLdapPassword). - Replace('',$config.shm.domain.userOuPath). - Replace('',$gitlabUserFilter). - Replace('',$config.sre.webapps.gitlab.external.ip). - Replace('',$config.sre.webapps.gitlab.external.hostname). 
- Replace('',$gitlabFqdn). - Replace('',$gitlabRootPassword). - Replace('',$config.shm.domain.fqdn). - Replace('',$gitlabExternalUsername). - Replace('',$gitlabExternalPassword). - Replace('',$gitlabExternalAPIToken) +$gitlabReviewCloudInit = $gitlabReviewCloudInitTemplate.Replace('',$sreAdminUsername). + Replace('',$config.sre.webapps.gitlab.ip). + Replace('',$config.shm.domain.fqdn). + Replace('',$gitlabUsername). + Replace('',$gitlabAPIToken). + Replace('', $shmDcFqdn). + Replace('', $gitlabLdapUserDn). + Replace('',$gitlabLdapPassword). + Replace('',$config.shm.domain.userOuPath). + Replace('',$gitlabUserFilter). + Replace('',$config.sre.webapps.gitlabreview.ip). + Replace('',$config.sre.webapps.gitlabreview.hostname). + Replace('',$gitlabFqdn). + Replace('',$gitlabRootPassword). + Replace('',$config.shm.domain.fqdn). + Replace('',$gitlabReviewUsername). + Replace('',$gitlabReviewPassword). + Replace('',$gitlabReviewAPIToken) $params = @{ Name = $vmName - Size = $config.sre.webapps.gitlab.external.vmSize + Size = $config.sre.webapps.gitlabreview.vmSize AdminPassword = $sreAdminPassword AdminUsername = $sreAdminUsername BootDiagnosticsAccount = $bootDiagnosticsAccount - CloudInitYaml = $gitlabExternalCloudInit + CloudInitYaml = $gitlabReviewCloudInit location = $config.sre.location NicId = $vmNic.Id OsDiskType = "Standard_LRS" @@ -266,11 +258,11 @@ $_ = Deploy-UbuntuVirtualMachine @params Add-LogMessage -Level Info "Waiting for cloud-init provisioning to finish (this will take 5+ minutes)..." 
$progress = 0 -$gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.external.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code +$gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlabreview.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code while (-Not ($gitlabStatuses.Contains("ProvisioningState/succeeded") -and $gitlabStatuses.Contains("PowerState/stopped"))) { $progress = [math]::min(100, $progress + 1) - $gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.external.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code - Write-Progress -Activity "Deployment status:" -Status "GitLab External [$($gitlabStatuses[0]) $($gitlabStatuses[1])]" -PercentComplete $progress + $gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlabreview.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code + Write-Progress -Activity "Deployment status:" -Status "GitLab Review [$($gitlabStatuses[0]) $($gitlabStatuses[1])]" -PercentComplete $progress Start-Sleep 10 } diff --git a/environment_configs/full/sre_testasandbox_full_config.json b/environment_configs/full/sre_testasandbox_full_config.json index 7ffb8a5d54..08826429f9 100644 --- a/environment_configs/full/sre_testasandbox_full_config.json +++ b/environment_configs/full/sre_testasandbox_full_config.json @@ -232,14 +232,14 @@ "gitlabLdapPassword": "sre-sandbox-gitlab-ldap-password", "gitlabRootPassword": "sre-sandbox-gitlab-root-password", "gitlabUserPassword": "sre-sandbox-gitlab-user-password", - "gitlabInternalUsername": "sre-sandbox-gitlab-internal-username", - "gitlabInternalPassword": "sre-sandbox-gitlab-internal-password", - "gitlabInternalAPIToken": "sre-sandbox-gitlab-internal-api-token", + "gitlabUsername": "sre-sandbox-gitlab-username", + "gitlabPassword": "sre-sandbox-gitlab-password", + "gitlabAPIToken": "sre-sandbox-gitlab-api-token", "hackmdLdapPassword": "sre-sandbox-hackmd-ldap-password", "hackmdUserPassword": 
"sre-sandbox-hackmd-user-password", - "gitlabExternalUsername": "sre-sandbox-gitlab-external-username", - "gitlabExternalPassword": "sre-sandbox-gitlab-external-password", - "gitlabExternalAPIToken": "sre-sandbox-gitlab-external-api-token", + "gitlabReviewUsername": "sre-sandbox-gitlab-review-username", + "gitlabReviewPassword": "sre-sandbox-gitlab-review-password", + "gitlabReviewAPIToken": "sre-sandbox-gitlab-review-api-token", "letsEncryptCertificate": "sre-sandbox-lets-encrypt-certificate", "npsSecret": "sre-sandbox-nps-secret", "rdsAdminPassword": "sre-sandbox-rdsvm-admin-password", @@ -330,26 +330,24 @@ "rg": "RG_SRE_WEBAPPS", "nsg": "NSG_SRE_SANDBOX_WEBAPPS", "gitlab": { - "internal": { - "vmName": "GITLAB-INTERNAL-SRE-SANDBOX", - "vmSize": "Standard_D2s_v3", - "hostname": "GITLAB-INTERNAL-SRE-SANDBOX", - "fqdn": "GITLAB-INTERNAL-SRE-SANDBOX.testa.dsgroupdev.co.uk", - "ip": "10.150.2.151" - }, - "external": { - "vmName": "GITLAB-EXTERNAL-SRE-SANDBOX", - "vmSize": "Standard_D2s_v3", - "hostname": "GITLAB-EXTERNAL-SRE-SANDBOX", - "fqdn": "GITLAB-EXTERNAL-SRE-SANDBOX.testa.dsgroupdev.co.uk", - "ip": "10.150.4.151" - } + "vmName": "GITLAB-SANDBOX", + "vmSize": "Standard_D2s_v3", + "hostname": "GITLAB-SANDBOX", + "fqdn": "GITLAB-SANDBOX.testa.dsgroupdev.co.uk", + "ip": "10.150.2.151" + }, + "gitlabreview": { + "vmName": "GITLAB-REVIEW-SANDBOX", + "vmSize": "Standard_D2s_v3", + "hostname": "GITLAB-REVIEW-SANDBOX", + "fqdn": "GITLAB-REVIEW-SANDBOX.testa.dsgroupdev.co.uk", + "ip": "10.150.4.151" }, "hackmd": { - "vmName": "HACKMD-SRE-SANDBOX", + "vmName": "HACKMD-SANDBOX", "vmSize": "Standard_D2s_v3", - "hostname": "HACKMD-SRE-SANDBOX", - "fqdn": "HACKMD-SRE-SANDBOX.testa.dsgroupdev.co.uk", + "hostname": "HACKMD-SANDBOX", + "fqdn": "HACKMD-SANDBOX.testa.dsgroupdev.co.uk", "ip": "10.150.2.152" } }, From 36e464df5df2595e109904017d5eb4231852fd93 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 5 Jun 2020 17:36:45 +0100 Subject: [PATCH 079/155] Add SRE User 
documentation for the code ingress process --- docs/safe_haven_user_guide.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/docs/safe_haven_user_guide.md b/docs/safe_haven_user_guide.md index c60a46b25c..cad144bf68 100644 --- a/docs/safe_haven_user_guide.md +++ b/docs/safe_haven_user_guide.md @@ -624,6 +624,22 @@ They will have to discuss whether this is an acceptable risk to the data securit > :point_right: You can make the process as easy as possible by providing as much information as possible about the code or data you'd like to bring into the environment and about how it is to be used. +Sometimes, it is desirable to bring in existing software into the SRE. If this is a Python or R package residing on PyPI or CRAN (and is suitably whitelisted, where this is applicable), there is a good chance this will exist on the Package Mirrors within the SRE. See the section [:gift: Install R and python packages](#gift-install-r-and-python-packages). + +If this is not the case, there is a structured process for reviewing and importing code from any accessible *Git URL*. To initiate this process, contact your SRE designated contact. The process is designed to bring in a **single snapshot** of the Git repo at a time (that is, the state of the repository after a given commit, but with no commit history). This is to make it more straightforward to review. + +Be prepared to provide: +- the name of your SRE +- the Git URL of the repository (it does not have to be puplic, although your SRE contact should be able to clone from it, which might be by arrangement) +- the **full commit SHA** of the commit (or commits) that you would like to request to be made available +- the name of the branch (or branches) that these should correspond to + +Your SRE contact will then arrange for it to be reviewed. Typically, there will be two reviewers: one of either the project PI or the data provider representative (DPR), and an independent referee. 
+ +Once the review is complete, if the repository is *not* approved, this will be communicated along with the reason (so that this might be addressed for another request). + +An approved repository will appear in the SRE GitLab instance described below. This repository will be owned by the GitLab user named "Ingress" and will be visible globally within the SRE. You will not have direct write access to this repository -- any modifications made *within* theshould be made on a fork. Since these repositories are in a single namespace, in the (somewhat unlikely) event of a naming collision, a repository may be added within the SRE under a different name. Your SRE contact will advise if this is the case. + ## :couple: Collaborate on code using GitLab GitLab is an open source version of GitHub, a code hosting platform for version control and collaboration. From 1b5248bc9d236253ab404d305c747a3c17b1181c Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Fri, 5 Jun 2020 17:56:00 +0100 Subject: [PATCH 080/155] Fix overwritten vmName variable for gitlab server names --- .../setup/Setup_SRE_WebApp_Servers.ps1 | 46 +++++++++---------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 9991b4693b..510e5d86ba 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -157,7 +157,7 @@ Add-LogMessage -Level Info "Summary: NICs associated with '$($nsgGitlab.Name)' N @($nsgGitlab.NetworkInterfaces) | ForEach-Object { Add-LogMessage -Level Info "=> $($_.Id.Split('/')[-1])" } -# Finally, reboot the webapp servers +# Reboot the HackMD and Gitlab servers # ---------------------------------- foreach ($nameVMNameParamsPair in (("HackMD", $config.sre.webapps.hackmd.vmName), ("GitLab", 
$config.sre.webapps.gitlab.vmName))) { $name, $vmName = $nameVMNameParamsPair @@ -172,9 +172,9 @@ foreach ($nameVMNameParamsPair in (("HackMD", $config.sre.webapps.hackmd.vmName) # Deploy NIC and data disks for gitlab review # --------------------------------------------- -$vmName = $config.sre.webapps.gitlabreview.vmName +$vmNameReview = $config.sre.webapps.gitlabreview.vmName $vmIpAddress = $config.sre.webapps.gitlabreview.ip -$vmNic = Deploy-VirtualMachineNIC -Name "$vmName-NIC" -ResourceGroupName $config.sre.webapps.rg -Subnet $subnet -PrivateIpAddress $vmIpAddress -Location $config.sre.location +$vmNic = Deploy-VirtualMachineNIC -Name "$vmNameReview-NIC" -ResourceGroupName $config.sre.webapps.rg -Subnet $subnet -PrivateIpAddress $vmIpAddress -Location $config.sre.location # Deploy the GitLab review VM @@ -223,26 +223,26 @@ foreach ($scriptName in @("zipfile_to_gitlab_project.py", $gitlabReviewCloudInit = $gitlabReviewCloudInitTemplate.Replace('',$sreAdminUsername). - Replace('',$config.sre.webapps.gitlab.ip). - Replace('',$config.shm.domain.fqdn). - Replace('',$gitlabUsername). - Replace('',$gitlabAPIToken). - Replace('', $shmDcFqdn). - Replace('', $gitlabLdapUserDn). - Replace('',$gitlabLdapPassword). - Replace('',$config.shm.domain.userOuPath). - Replace('',$gitlabUserFilter). - Replace('',$config.sre.webapps.gitlabreview.ip). - Replace('',$config.sre.webapps.gitlabreview.hostname). - Replace('',$gitlabFqdn). - Replace('',$gitlabRootPassword). - Replace('',$config.shm.domain.fqdn). - Replace('',$gitlabReviewUsername). - Replace('',$gitlabReviewPassword). - Replace('',$gitlabReviewAPIToken) + Replace('',$config.sre.webapps.gitlab.ip). + Replace('',$config.shm.domain.fqdn). + Replace('',$gitlabUsername). + Replace('',$gitlabAPIToken). + Replace('', $shmDcFqdn). + Replace('', $gitlabLdapUserDn). + Replace('',$gitlabLdapPassword). + Replace('',$config.shm.domain.userOuPath). + Replace('',$gitlabUserFilter). + Replace('',$config.sre.webapps.gitlabreview.ip). 
+ Replace('',$config.sre.webapps.gitlabreview.hostname). + Replace('',$gitlabFqdn). + Replace('',$gitlabRootPassword). + Replace('',$config.shm.domain.fqdn). + Replace('',$gitlabReviewUsername). + Replace('',$gitlabReviewPassword). + Replace('',$gitlabReviewAPIToken) $params = @{ - Name = $vmName + Name = $vmNameReview Size = $config.sre.webapps.gitlabreview.vmSize AdminPassword = $sreAdminPassword AdminUsername = $sreAdminUsername @@ -266,8 +266,8 @@ while (-Not ($gitlabStatuses.Contains("ProvisioningState/succeeded") -and $gitla Start-Sleep 10 } -Add-LogMessage -Level Info "Rebooting the $name VM: '$vmName'" -Enable-AzVM -Name $vmName -ResourceGroupName $config.sre.webapps.rg +Add-LogMessage -Level Info "Rebooting the $name VM: '$vmNameReview'" +Enable-AzVM -Name $vmNameReview -ResourceGroupName $config.sre.webapps.rg # Switch back to original subscription From 3f68aae53c9ae51400312904b1f06ebb3c0c57ca Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 8 Jun 2020 10:29:43 +0100 Subject: [PATCH 081/155] Update docs/safe_haven_user_guide.md Co-authored-by: Martin O'Reilly --- docs/safe_haven_user_guide.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/safe_haven_user_guide.md b/docs/safe_haven_user_guide.md index cad144bf68..4e115d9881 100644 --- a/docs/safe_haven_user_guide.md +++ b/docs/safe_haven_user_guide.md @@ -630,7 +630,7 @@ If this is not the case, there is a structured process for reviewing and importi Be prepared to provide: - the name of your SRE -- the Git URL of the repository (it does not have to be puplic, although your SRE contact should be able to clone from it, which might be by arrangement) +- the Git URL of the repository (it does not have to be public, although your SRE contact should be able to clone from it, which might be by arrangement) - the **full commit SHA** of the commit (or commits) that you would like to request to be made available - the name of the branch (or branches) that these should correspond to 
From b6cafddc0d9bce5ffaaf0e19c2bc2784adc426c9 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 8 Jun 2020 10:29:59 +0100 Subject: [PATCH 082/155] Update docs/safe_haven_user_guide.md Co-authored-by: Martin O'Reilly --- docs/safe_haven_user_guide.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/safe_haven_user_guide.md b/docs/safe_haven_user_guide.md index 4e115d9881..f02cb465e7 100644 --- a/docs/safe_haven_user_guide.md +++ b/docs/safe_haven_user_guide.md @@ -638,7 +638,7 @@ Your SRE contact will then arrange for it to be reviewed. Typically, there will Once the review is complete, if the repository is *not* approved, this will be communicated along with the reason (so that this might be addressed for another request). -An approved repository will appear in the SRE GitLab instance described below. This repository will be owned by the GitLab user named "Ingress" and will be visible globally within the SRE. You will not have direct write access to this repository -- any modifications made *within* theshould be made on a fork. Since these repositories are in a single namespace, in the (somewhat unlikely) event of a naming collision, a repository may be added within the SRE under a different name. Your SRE contact will advise if this is the case. +An approved repository will appear in the SRE GitLab instance described below. This repository will be owned by the GitLab user named "Ingress" and will be visible globally within the SRE. You will not have direct write access to this repository -- any modifications made *within* the SRE should be made on a fork. Since these repositories are in a single namespace, in the (somewhat unlikely) event of a naming collision, a repository may be added within the SRE under a different name. Your SRE contact will advise if this is the case. 
## :couple: Collaborate on code using GitLab From 6619fc78707b0d6cbdff451f2f4a826d5738df7a Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 8 Jun 2020 13:23:41 +0100 Subject: [PATCH 083/155] Documentation: overall description of the code ingress process --- docs/code-ingress.md | 44 +++++++++++++ docs/images/code-ingress/gitlab-ingress.dot | 65 ++++++++++++++++++++ docs/images/code-ingress/gitlab-ingress.pdf | Bin 0 -> 27268 bytes docs/images/code-ingress/gitlab-ingress.png | Bin 0 -> 47118 bytes 4 files changed, 109 insertions(+) create mode 100644 docs/code-ingress.md create mode 100644 docs/images/code-ingress/gitlab-ingress.dot create mode 100644 docs/images/code-ingress/gitlab-ingress.pdf create mode 100644 docs/images/code-ingress/gitlab-ingress.png diff --git a/docs/code-ingress.md b/docs/code-ingress.md new file mode 100644 index 0000000000..7737893d90 --- /dev/null +++ b/docs/code-ingress.md @@ -0,0 +1,44 @@ +# Safe Haven Code Ingress + +The Code Ingress workflow allows Safe Haven users to request access to software that is available as a git repository (at a publically-routable address). Requests are for *snapshot* of the repository at a given commit SHA, and do not include the commit history, primarily to make reviewing feasible. 
+ +This might be the most convenient option if: +- the software is not available on the PyPy or CRAN package mirrors inside the SRE +- it is anticipated that the software may receive occaisional updates that are desired within the SRE + +See also: +- [The section "Bring in new files to the SRE" of the Safe Haven User Guide](./safe_haven_user_guide#newspaper-bring-in-new-files-to-the-sre) +- [Provider Data Ingress](./provider-data-ingress.md) for a guide to bringing *data* into the SRE + +## Code Ingress -- Workflow + +![](./images/code-ingress/gitlab-ingress.png) +[pdf](./images/code-ingress/gitlab-ingress.pdf) + +Key: +- arrow with solid head: connection initiated from tail to head +- arrow with empty head: something else +- box: a virtual machine +- "document" symbol: a script - runs steps indicated by arrows of the same colour +- "folder" symbol: a git repo +- cylinder: data + +There are two GitLab instances within the SRE: one visible internally, alongside the data (normally referred to as just the "GitLab" instance, but sometimes "internal" or "User" GitLab). The other is visible to some nominated code reviewers (referred to as the "Reviewer" GitLab), and is mostly isolated from the rest of the SRE, and in particular is not accessible from anywhere in the SRE where there is also access to data. + +The ingress process moves code (a git repository) from the outside world to the Reviewer GitLab environment, then from there to the User GitLab (after approving reviews). The steps of the process are as follows. + +1. The User sends a request (out of band) for an external git resource to their Safe Haven point-of-contact, who either has administrator privileges or can forward the request to someone who does (for simplicity, we refer to this user as the "Admin"). The Admin should be authorized to clone this repository. +2. Admin can perform a preliminary check by themselves at this stage on the suitability of the software to be brought inside the SRE +3. 
Admin runs a command (`SRE_Upload_Git_Repo_to_GitlabExternal.ps1` in `deployment/administration`) to create a project on the Reviewer GitLab containing the repository snapshot. They will need to pass as arguments to this script: + - The SRE identifier + - The GitHub URL to the requested repository + - The name of the repo on the User GitLab (which may be different from the basename of the URL path to avoid name clashes, since all requested repos end up in a single namespace) + - The full commit hash (to the requested snapshot) + - The name of the branch to use (for this snapshot within the User GitLab) +4. A Reviewer logs in to the External GitLab. They will be able to see two groups: "unapproved" and "approved", with a repo with the above name in each. There will be an open Merge Request from the repo in "unapproved" to the one in "approved" with a single commit, of all changes introduced by the most recent snapshot (with the same effect as squashing all the commits since the last approved snapshot). +5. The Reviewer reviews the code using the usual GitLab facilities, with an opportunity for discussion with the other Reviewers, and then indicating their approval or disapproval. They indicate their approval or disapproval using the "thumbs up" or "thumbs down" reaction to the Merge Request. (We do not currently have Merge Request approvals available on the version of GitLab within the SRE). +6. The merge is made automatically (by a cron job, running every 10 minutes), provided that: + - there are two "thumbs up" responses to the GitLab Merge Request + - there are no "thumbs down" responses + - there are no unresolved discussions +7. All "approval" repos are mirrored to the User GitLab, where they belong to a special user ("Ingress"), and are visible alongside other user-created repositories there. 
diff --git a/docs/images/code-ingress/gitlab-ingress.dot b/docs/images/code-ingress/gitlab-ingress.dot new file mode 100644 index 0000000000..a45be0fe48 --- /dev/null +++ b/docs/images/code-ingress/gitlab-ingress.dot @@ -0,0 +1,65 @@ +digraph ingress { + newrank=true; + rankdir=LR; + graph [compound = true, fontname="Trebuchet MS", fontsize=12]; + node [shape=box, fontname="Trebuchet MS", fontsize=10]; + edge [fontname="Trebuchet MS", fontsize=10] + + blob [shape=cylinder]; + + {rank=same; blob, win_rds} + + subgraph cluster_admin { + label="admin"; + color=lightgray + rank=same0 + SRE_Upload [label="SRE_Upload_Git_Repo_to_GitlabExternal.ps1", color=purple, fontcolor=purple, shape=note]; + } + + remote -> SRE_Upload [label="1. git clone", dir="back", color=purple, fontcolor=purple]; + SRE_Upload -> curl_zipfile [label="3. Invoke-RemoteScript", color=purple, fontcolor=purple]; + SRE_Upload -> blob [label="2. copy zipfile", color=purple, fontcolor=purple]; + blob -> curl_zipfile [label="4. curl zipfile", dir="back", color=purple, fontcolor=purple]; + {rank=same0; blob} + + subgraph cluster_gl_external { + label = "GITLAB-EXTERNAL"; + color=lightgray; + + curl_zipfile [label="...", shape=note, color=purple, fontcolor=purple]; + zipfile [label="url_sha_name_branch.zip", shape=cylinder]; + curl_zipfile -> zipfile [label=create, arrowhead=empty, color=purple, fontcolor=purple]; + + subgraph cluster_glext_gitlab { + color = lightgray; + label = "GitLab"; + unapproved [label="unapproved/repo", shape=folder] + approved [label="approved/repo", shape=folder] + unapproved -> approved [label="1. merge*", arrowhead=empty, color=darkgreen, fontcolor=darkgreen]; + unapproved -> approved [label="4. open MR", arrowhead=empty, color=blue, fontcolor=blue]; + } + git_repo [label="git repo", shape=folder] + git_repo -> unapproved [arrowhead=empty, label="3. GitLab create/git push", color=blue, fontcolor=blue]; + git_repo -> unapproved [arrowtail=empty, dir="back", label="1. 
git clone", color=blue, fontcolor=blue]; + zipfile -> git_repo [arrowhead=empty, label="2. Replace contents & commit", color=blue, fontcolor=blue]; + + cron [shape=circle] + check_merge_requests [color=darkgreen, fontcolor=darkgreen, shape=note, label="check_merge_requests.py"]; + make_merge_request [color=blue, fontcolor=blue, shape=note, label="zipfile_to_gitlab_project.py"]; + cron -> check_merge_requests [arrowhead=empty, label="invoke"]; + cron -> make_merge_request [arrowhead=empty, label="invoke"]; + whitelist_log [shape=cylinder]; + check_merge_requests -> whitelist_log [arrowhead=empty, label="write to", color=darkgreen, fontcolor=darkgreen]; + {rank="same"; cron, zipfile} + {rank="same"; git_repo, whitelist_log} + } + gl_internal [label="GITLAB-INTERNAL"] + approved -> gl_internal [label="2. GitLab create/git push", color=darkgreen, fontcolor=darkgreen] + + win_rds -> win_review [label="reviewer"]; + win_review -> unapproved [lhead=cluster_glext_gitlab, label="review open MRs"] + win_rds -> win_apps [label="user"]; + {rank=same; win_review, win_apps} + win_apps -> dsvm [label="user"]; + dsvm -> gl_internal; +} diff --git a/docs/images/code-ingress/gitlab-ingress.pdf b/docs/images/code-ingress/gitlab-ingress.pdf new file mode 100644 index 0000000000000000000000000000000000000000..822d696caf32cbd63c393a4752f633e84cc90d6d GIT binary patch literal 27268 zcma%>19T_Ly69t1>|~OEoQZAQwr$(CZEK>5ZQFJxHYdj9W%fS%+;iW%cdgf5tGYjQ zRrOc3y4SaUMIt98LQPM@3`5d*QS}PL1fT=h8d$(^aRF$hjBQMv%mB=vDg_t-06;5Z zZsla`@Oib;cQO_-HncS|hT-9XaddJp*0+Xn%j(pyw8R}nd*A9V;0-4J>C%ah2x3VW z8R$xo0^*Mt#DzV+mmS|wSerdrJNGW zu;jUMmioiGdObC@ytB*Js;61ys7NDGpE=>&;`dtVgYDCm;;(Ao zy+n&@>>q2n8egq(6f7$(`Sr`{MMjZoHY7l7@iJFWcROa3qN+HQEmCmEakN3YE^!poys$`JzVBuJYUb_mxOvGv zs@{Ho>t9#@K11UlzUCjQSFM16Pf$9Ln8D^Aw0L=O@akil9wzNIVZHFxZP}&>$>B)A z#v4j|EWGk@>w3E2p2bB&ydQ@*3QS$ajLpOhI^588c7Vu3UbEzv3PPmCff3nnYxMr# zbJ2!h`h)S%In2Ip7|Gs>nNA)j%%Gyp*FXoK7g*_zz8k;VvN_Dlu5a7h6UJ{GWECO^ 
z(+G}p&I<@@O^vNkU~!8bzQ4G{Nn&^i&kVIkb+4L#4J<82K(vo)5kfT&)KXQjl;lW5 z;fQ}*!Ik3Xv_e&Y$nVazpX2o|g{1u|J%r6JquqYi#@_q@?Qv+GX^`qik;#I&yi-d3 z;%y}u1Q%^t->XHvQBxCY)eew&&EE4hJp+56v0kbxIfiHIxuIGjrV9-vVj>BJ-?=lJ zs`!3O>u36{=AQbJT_3MaYWp6e_(P-Zp%6W{`HX$uV-^_oGMbUNHE%eUhe&-XYpWPMCE#>K+*d0ex> zOF$SPKY4p#2+s%HN;N>P`Afyk8?=vPqI}g-0xKUKO3ou|E#%eqq*DV!Gr-I$qWjiq zlu^D;domxr^v<1ORewh<*79ln>X=DtgM7`*9kZQOrF=;cqr0^kJ7V+(J#odz}5+6hN-l zA~T0C@p{vKzMd43?X}Ox>lg?mPjGp4cfMXgKmFGB7ZOUqyCtJ&^RvVC$J%Hm(okdD z2m)4dh5w0(+AjmC4(2r)uSn3Fgbmb+w6U<}1ZMymbc$6;F%3M8s!M(-JxEZQln2E^ z%l*xQ-V3O?*q9g^&tsf7?Fw{pv*8tGIWa)DR+7Qs3@!I#141sZ-*aR5;SS!1;FVF%A zl3$zPZ3OGE;GLJEXfmSqmETFDAhdi}*H&~}J}6kJQ4lWK83`oCMoJh_z$P&s@o8yo zXjsQ1AJ@k-2Z!OII4DJ7!EYIy=?Qmhp%@shD%yeWh|MeWbZJuhOr#R%>}2X1#qaUD zipwv^0&{aRi$lGp4ca?4qVG`n!%2z|!KwQ05=<;~>;QpqYW48(v0}Oqdch?#H5>09 z^+qM?l5;K!7L3-mk#+|^E!r~haFt3V(t7j|w|ZUW17}+$b^Hj>LH*7Y6jDdISIiBa zhCLAjl32cM>R09<@<-=i!G*a)7mzB*5V|?_DWQrIs851x>ko&+1=nA!Vn~R)qoQ(>x=O05*Ft8S6-BQ?qW0Dy?ioH7+l*L*ed? zachs5uxJqvsJRi)2;w_|xe}HiT86;h)9d}=S6%@4qB}!y4x5)LBgkV520!E;7;k>O zNYbjQu7qbe1~8CC33~o~4}U}9HQoaXrXpAbp=%haK5s9<-X0ZhggVg5PYL31c##AH z-t#Cn+GS#Ln~?+soLx5Rtew08L}rYARETKPoUhx*oNsdym(P`Q`t=(+v>#p(Lcy_O z)&SljesP*$CiRnYeW64>d#^gOH3pWSGXq`>iH$Ilq(cB<3aurm!x|z&daCB{%dsSo zRTFn0v|9B6Na>@c>L^JKC#~jKzk4@dvOH{ZWU?Ve3P3=jrc3QMx2SCoqh3!oN6s0T z26~hn+@y?t)a!pCl2wRD1ac5_QYI*hgus8BRmq{(wJY=ry5z9&(6!J4hHn)fzkU-MY zgIcX4n)#hJZt?xeYtPIhZSaUFl&uFA(~(2hsZyzAfW`0&s8+NYbKSwdFdA-;1RrHz zT@iA=I@C^ni&A;xp{-Q5sr$jqrF;6>6(x=-=N=~bL6sW@PNC8-rmh)ojsek_(!=uB z7O`n6yde_mSg`B!cx6(jpVcI5yRnpT)A=Lp%VS^%LiqF2FJ22HS$6*vh zLSt(|CIh{~7pyVc{)yRU(VulN@nJEMax&u2#m@0VSa#zXNXh}gXxdx=UG5jS*Iai!9|t;THZ61sef`e3~Zzv!oz zM*_3qaG3CCBSHxu)dHoMrWGEs1Z+V0G3ocWIJ7C; zXsI%k5-z(5X()qpNpb!4vlsVG|NPlg7Q+s-iQoT7)Pi+ zdCv>SW+Bmkd@C7TI8lwx8(Hd-N8zlapGy=-`TcSq9q5_T%IA1}D$b|RS!AEDbuwOw5~ zd9ySC(Yz)ML*13|)@H2;P7Z4=K#HT!4`pIHWW>d}LL|yrcG-h=w7})|Yp+;Ytwi_U zczH6I(5-BU+E5U(!8h@TXnU#aOdf;5l%bJVqyXX@5D`=Rt!yrj`gf@+0Vz@Na@!!LsAKgk$cPu&SRD;7jw?<{KnI{4d 
zi|?tEYN}a)X63e*ivdw(Opxti^~MP=77!sveU%Nyx=3>C?Op;YC>ar8ho^!|T*vsV zv2~84n@KZjLk4G9PIgsd)*MzzJNndM(~bdz^5UmV8j}?)D!JmR@d+xOL1F!aybfh!>kPS3jACd9j`N&@xh_IWJsG`$n_5tAH`lI+fX!Yqn1puueKY$)UYpDNOf952ghQDiC z5nCIl|L``UrlY06zL{As|1n*LM8`rlOypDMP0RG&?KmVXV2;on07&?>px83SnL z^i7QcBmnxqhA5}+VEox5{XfsrN*f!Q>kHVr0W?2VbN~iAW(EKw3mriFFL%Mu;eGl$ z0{&!Pf2UcZ4z|vA|1sphJNZld&pRuAcI}{V<7oG%Geh@()Zzdq2WR7dB*9NFA!8SF zLt_O|fj`QBMMS~a(bn0)(AW|1C%Ov%@oMa!^8c0n(f=9q|DOK+iQnIOB0Yefj)8^l zza@kRY@S|9i|I#MuUU_s=|(1fU<|B+cyxLK0%8J+tt5=2d~o}~CLlplZV-{!Xu%W~ z=WF;(6f2G3>uUm<_zUH5hUf`4kxLo_kSZ0z>lT*nsHG(D^1dr=Vx+I$rY3|f)~?O% z@0M2+tlO?rtw&i`URR7k$O4r{eo(|UFQ;3-b+w)wxr3fyp!CO8wi~|LRWSJC6C^hA zwfky+yj19VUTE4PD%fQpn%lh(O;>vEpRi}@1rI~rJUsrm(Ec^FJDiHF5ajLnlGpE? z`3BaYpCxtC?lGUTL|ef=;{(2?YSrQ5Y`b}o$}1s;_&{W=`e<{4(NuGk|24!V@8hNM zecmL-Bd;?A@y5Py1W5ZETo|RlW^hs(-U;xI<1~ZY`h@fp5)m?A+lvK>qe^%2AxTi&Xc!ZKrL$ZKiFr><=wy!KBnY(sBYv#Xw_w1*^kaO8!s9sQ0qLe3GwU61a zUHz%rudV<;mGDG0YSX+&I~6B!2%|(!+P^1@z^*4qJRt#DiAh2~C)$J&T3 zR6&zlF`*1=8s2*8dg1t`J*I8TDcfZ=9QLTavl0v1k>c`TOr6bm6_r%VeEU0m1yzZH zL$XKxSP*HWqOs5*R%>fzI*OYCKn7tapWPp_hhYA~Wqh*S0-=e~(^IA^des4y!FG$w zS_xnpZWT( zLuJ3YyrX?jyjfzGb^d+XNRnv-MV;w!)r-IBc|pyIVEwUFO(sv@WjfTcEL#k5`RyuQ z);IZeurbyP&#KhjONosrn^-16_Iny$cX&A=b&Qv%WMUu(2sz^L^;5eRfbVUzAzuiW zqAT=*V1WXWObSP0U0;;a@4+@iu9dIDv`P@1)1kG=5yLvs{0D9_R=l#7cuo-IXrdif zI!@?BwBXXU^XCm$*^1&txLeT4i4#V_z zSG?G!28Q2HKJrzOFfJ^w<~aDw#*h<`6Kv8kzHfO-%r9^%77Li!LpFQtf`rt}!3}bP z+NkBFVSON6$^=hsC&D*qFLoCBVUSc#1rT~x3k~^Du*}kX7fI|`;F_gP3*<^W72n`g z6-jq2#s(~4bfrH0yH2^`nNKDFoYfH$ z@y`Udg@$!+Eo)`4vFkuaaODjKR{JPCk__{E8ewpWd0QIK8~a-{QE+2$iXV;;P-< z2|VAaEq0{IGWqCobFX~qYd3ga@^tm7dlwz8UB#U5W%B`WOIit$@p2xs$g|i$@j|v< zY!T+|UuOxh06^_?UIB<7HV|?;0hbNUR!iSzg=rkCj5bFH;cVyzNTS^diOv(vmls=i z@{s343Xk&q^1U|}T4F{FdjwYLjmuRp}c-ZsM_AM+M1gBmOu^TgGNr-g!XJ;LP!8Ha*E zi^RnR>*iqB6}p}mY6=#sTkP!wi>4-5D3G4{5~Sfz@0p46Bc2TLd+^K8IO6RB`x{z2 zSlHWM?{u0fXP*LOj^gwC9!|wOBcN2 z6g>#$m*LeI+(Do}tRELzK@X%dfHabEs4A||LDhb^}R)@QvY_Fq^bUT(* 
zikX~7L14Q^o-~bN&*H&Ql)c~K{~dEt1kdx_?3CdKUYz42pZLp{;&%g#ffUD{??PRA z9!j%hb%nc`yXH5b^27pZVbU<-s{C32)4BqlRJ8l&*l>=)bkbEjX$F5_a{qD6b$FU1 zQuQ#C0hk!yTcUgmGG@A|GZW=JvFHwM(N!A6v(OQ`K#R06!8!W4iew*$pwge(T!caKf-yGeF47~3I+B~Yk({MU0)h}O)_ssDMph&iVpd3OQ1=kxAkI#p z5(;4a4O{)3@8dJf)Iv``DnTtj zqf4i(`bxl&gg}rk2=Jo-_4=mKuzT$Z{;br**ii68B)4d>45QfasA3?cR&z!MtO2D~ zghN1UKs4gm@UT(Q9WEIVQqZ!>W3I+Q0qrD-XDlE0+vV(i$$LrvO#Uo$X?_WzQpHlI zNF|&?Y6nR+y-$)Ffws}Dw$QAjjbeAs#cW4hUjtJfS4Xt0aEO4TrT9ieYh3B)Q61V- z=Egp*dg3@-G4%YP<#id%{hR^?Vn(C>XW`KAPgC8Ch^EhDd{LRoElgjkLLC}Q z^2+k+`SesOE@KWUg&WFB>&?7p4+&>Al@u408rqLr3wSX`i=b@PT0oIa2s{Llc&+7U ziD~Bxc8dal;4y^CC>(sDlr0LClTX&shSJJsC1w~9mU+U)QFwTe+IMuAIm$b8thqA7 z*EYHIv0oM_blk}Cp3urJ%V3nISt7sy6h}&e9L9}h$qO#?)4%OLBFG+!d@?h5t25Dy zc(m%>yeA*=e2*dL!uE7?)tYH-Ul?QGeikL=YS83**jpK$E1z-euC{W26@8fbr8l zE-qp1=5&4@$HV|;8W@?{qVqzNb>ZQ-<3zb{2T7tS)P^NSS-)Z=EN6^kMa{<+^dwI) z&ybvg1S1|W8lA9aMi9-iM6PB9FJ6N(SR~g5Pq|4w6;;SFG;BZX+iD52g{V%q547V+^ui(D9 zLdNs$Tdk|3rHT#lBB>QUM88gn4f?OcK$`WZ4h|xJl<~nv>VpRoiS(gpDe`r&1_`9j zq{pJWMf*D9L zGM6V@94A5fU0@QrOXwu%6&0FS%DZF-R@8bSZMs3!wrIQS+KGwlPH*=b)uqrG?i5GW zV({ns@7iQNOvN9WT~6a7Hox{v2d4ceHIGpsNpzc3=mW*n^AU|f^cfWU$|uy+d}+qiv}odk?hJo_5y0D};*Zz1>Y=fr%n6YO7-6 zXEPL>iw0>$%BrCB1Wlu)u94((1Bp;-xfoL2R>!HCV($0))a zC8CW&qz!s^cUcHtKQb}aX_GEovq>&TmoSRd+abv=Cec3f@neQxW~jX-r1ajNpvcIB zG#*=>KQz*@c6Tc)zG0{cN>w*bS9l0GIWlD2xVJfUpcA)mIGS;3)E=$<=Fnyl)M8(x z!#U?Ie?P(vP(m1r_1*ote^WkxF4#jWn8bK^)VWIyb5-!`(owY=9FM&#L$?rC4*z!B`n>D%5wk%WlnXASn~s*HM{dZRl%)tpCJ9TlUQB+OdJm zxOn$`hff=&tAy2^%+OJFi;VMe2z||vdGn`u!VILJ{BHdn>_bq!c`_}+O)P~$^V3$f zY|>m2&EKf1&P;OCm!MQitEQ$jQ(3;!kKp8K7L1~)&W+5D<>R!O?1hy@ZWcAAGpQAp zG@+9g@@VSF;3A_@Vm0)5Vo3ui2I6$s+2I#@v`%+A3rMnHc@;NvuE_puzSMRnnWP=9 zmxi_(C#;jWcQ4wV1&JVyJ~Ehm?h(D27KEn@GjuUg;_Q`4QhQj@Mf406Hr24 z-aC8&L!j|Od(z&4@otz)oO<;vU^q(}rc(gON)~`MAIVt2JEYm_K@v(O6~iMCQ8{R{ zG}K`H7IXMglN#|kYZT3vDE5;EbSpvrbt4E)oT-EA&z`97kXLw1Bjs)*U8F4~uEJCV zcYQ_t1Pp8~)cK*v^8q^p4Vkh}KSwN47z1|12vLIuGuFvi@}SGqU?Cv3$$Gr(?k0-4 
zJJa!6)jV?n;i6fHC{d>7ARWM=c0B!jI+xfiqZDSu5Nie}E`F7_WZ~*~rxG=6FQjeKv(0#Pj2%3%OeIDw=-Y7!u^%~(;4|*-ZbHKF%gJ^5}wKzy~8}0;UW5W8aDEN zGxqbPb*9&gz$+#7zjoTQ4M3m@YEsGLoy+b+9DUdIivc>-6BLb-9hH*YXCW5zayC&Z z;6fJZ0*UW+W19q2p;<*n$Ccl>!cV|IUJx}DENcz%9!Fc`1)fd`vEk7fh=DcDcwd zzJLY@gWks8D!@wIDt3Q;er2mqCTGB;^|*o7I#w>OQsCs*z!jWIY1jCkp8;UNjtsr_ z7LF$0J#h$b;ow;yK%)b1dbf6N{FQtC&~dqDHn>*s!}pr#5qGYiT(54)u&*v9%x-u% zFqRQ$)EF_(!xmp#<(p*KECYJ2N&Drjhxnd>gyNnd1Drh9ffH@6)X;JKGbn1j#i>Tg zEyJd-PBX-zs>$Wj)^iYZV}}VyHv|bp93wRL zX<@r8b&=U5o&(@BTSBwPMaRX5Tw*jjF zQ9-If%zz_;9)qwW7%_Jj9x@ni8*@f!q+1kwPi0y2Y8gQ@`nz|4TnKp_G{fx^va z>^D;6v5#6JDw9=~f_hL&=*;>{Y5I)|B%E)!q#Q5zw}L`p>P#Bn4vl2ihQ1nn@8QOtrqBeH^T zqm)R}GBmHR2y*VatYSVBzEM1as~Gn7|A5LdKo z*EcrXG%HtRv|Z=3T`yZ5KeA;vpZ7S6v%NiE9v-1@zP#hCOdU8KF|2KiyEa{#Z*IRR zoyBSvXIp!&Jv2jeu0J+h*oud+q=NqpuzX$6u2g7o|P z`Q-<%0V)N8@q+^ZhXH{Af#v}ns(^G~Jt^(|&{|}>REo~!P}*~Dx9BiYjLub4-cydW z=qM;3ibY#=Q&)*LOt9!6E+2{vu;}Pf+H)_*77e!m%Z?}X9H@LqNDTA}R`6V!Ht9?V}fr{x3h6!Y`C_ zD~0#EK6xdibp&x$aJ$+&plC%OqY?`Ix3AvstkXuj_P|5?PQ#~*ZjbI59rh)N1`j< z*_^GXRRnK8gM5Oun7^pul5a}l>%2k}NcO%iC2ap#&*}S(nybl2KHnObGGS#8-PybH zilm%<^kt!chWzd!x0+32#F%c|Tj}1Ae~ybxuripr9<6DDYzKuAhNzZ@m{gDodon}f zRw+vqr~aUt2w!G88@Cg>tusRO zQ+w0YvWk9AZC+rW%ROOg&-+Lp9yhs5*FG;z<$su&e;C^*USgzU{fE5K{}&DX2fh8F zdqT#Jh7RU-PPPtzAlF~OPe$MRlgo*TN=vJ#P%1eX8#o)989PZU{@X!B-`d>D9YFqX z8$ep|+uv3zebY}e#_(@ffxp=uH5)U4nt_Gk6LYaK0N9xr{sr*Fo%F5D4f$jcePYct3KF7$;^7jJxT#2^I zHs*ge!vg;La{MPyrT-uH)6)NG^zW$sPaA6g#Q7hh%0NfY!uH>Zs@ET)+Ii)1zI^U} z;V!FVMEVI-Y0i?hJ0hlP2Dtrc|vJFde<$AH`r5;|&!3y{U#fX){ zmPiWB-sZC!H*Ow?rObG-7|Dd`8{4)QSa^-xkB-xwN3pN1f=iW+x|Y@Vn%(!W-oCN^ zaspv8OGq+=57g`T{glBtg^~KK&UZ)Ta#>L1~sdZU@SgoK@a?(fA$0BMo~& zBRCvY&qKZ;U3e6Bx(x*Wvmzv4TJD638v)Rj4>aP|l z&A>6fpu?}#tv1hf)wC_GS#jx){Wr_q>aA_w2WhY&0YMWmD1_XC znChabx6X44m_H;UCi318KTJ;ul7qxRWRUI&U_yhEz91mX)-9ozl14qc7v%JnlC{Mo z?l?ykK{@e{=>A zNnQp7A{-6XK9*Nao~O*gK)#YM->{_la{O6mvwb1)<*8rtYMvFntHb+4u~zdmb@Y3P zESgT%%jC^M_`**k9v8QI`d`nymp+{ta&_+eL?s|Um>ukBXGL|8lV)D?{eyZ!*wICQ 
zoDy`Z$sRX)6mI3ao@JB=B^d-$R_YWJXh>}rAm_gm;fKTL-P9ffVdrWv6(J}1ORQjJ zd2hlIAem!Ai+PLjVGDs6;G4v3!c(u~J?Bl7RvB9wHh#+}Vk&is5W=xAf7f*Qf^P2ulWocH988mDA+jvo^-DR3j-5z^ zp$x*RKvO*TK7d^$i9$g%LyRmQioV8ow{J=u%##%9JO!*)cDDyU+`khE@sYo8&>;*hG)Ea#I-TD9qxR1QSaA%td{}AP0wa z%R7R|2+-u~Qp!R+C!WzN*-6j>zT$Vq{<7_P`=p{O@7M26dzoNQSNHL;$Kn~_XYytt z5$*xYw<0t*wo9uSR#7Xs`RYVCZ}g2E^|oA~Q2+LImpbENntb z)b^<2BqQDk#pKAcO*jtd>Wa?As^`fJdU0E1#JnM^a214@=2B(Sj9cIvV?;zl3)cw< zt{6j6dyC@;ZQS72>?-yZ+Rjrg-y8zocP;bmmF^_vD#L;&?_i@7d(2w8LewmOQr1gU zR*){S)i)Dw{#fmIaha1jJC}ZtdDUn~ZAx9KI8PT$Ua^ln4MDV~z34+y@0&vQrA^o2 z%u#4{jBkQ6qH``qw!*#IuglN3Fr@3Oq^|I*q7G%Wobh5})TSnALZGgNDBy<-MVX4(iU&V_)!|*ST;W~u zU(r`fbr>AEf!lf#xplKfJ$j&RFzVDgcIXt9FZ028_6K_u=PW#{nMW{5?rzNHaR|yB z3Tc4bGiTYX<;Bz0AkYt0c=b@*(Vf)9#S6wOa?J7$d6n+zpkt!U@%n zeGB}ups=|X@DbN$fh2JOBylYSg_)s*O{{^pKTSQ^Uj5L!InCR88MC_mF^~Vr_YBE3 zuNiz_t2)AcpH^Y3seGN-sb3P1Im{vEUeNRIyTy$MB)ga*-wpOReae?B?#o34HGVAp zu%0i%vyOF<+RNw%ikaoBhE^{&9$;pnn*0u}VMg9%A2Q>{?RZ8eP-))64kD%yy@3Ci zbH}iVbBpfDep6sDu+_2Bt%j#mN?$7ke~!KciXnjlOA?0Po9^&4@~U0S}|c!V5rX1(p+fW!$stqm-v-zaotWZS#_Cl zuGjPW>-Q?Rd-n&zIU7a-E{vxI;T%TMlajAwRzcCohCOlcZCeiXsRk9Qr<}^=CgjB? zsF@xjwPgZ-O87Xrva%8yn_!16hQinBv|u*zMeiyK<*>?3Hx1J-@Y3?mkPfI5o^(1s zT`7WMfCk)f#a_-DT1pTgNCB(s-LebKhl?}ZO0zx7y=h;^C*ZPkQCbiE7!tZ+GCy9c zeH}-OJ{jgXqNqwTP3W`uWANkv2>nVn+Tw{v_hFTELqcjX`E;Gk}#*g&<& zKcXPaKrPw4$uwEs1brrBz8-k>#)y*-AN3NxYi%L4^$Py3SvQ4=hQM)QWr({V&GC1~ zWvBhIAQMgv8{M5Dpy3!hF28+|KR~qe;*Lp^BWk9FK!aYD4XKKOA2=kh2m0MPG+Kh^&efrrW}2!xN$lDBOVad$5?A8rPX#JZn5kzre&U7O6A}02}Bs#o-qN& z2i<;2vRT4oXuAAktf}z>a4b@gIg=K0Ws#Iq z6KGaxKKX3t;{3<)L45JD!fDGoHGh`4Ldw zP_Qf@n4Ek$E0O}{&R+$ZT7j5VQ&VsGlD{A!G_PvIFVX})H7zf*liNf2IJ?aQ4eqK! 
zUa})3tRNxzk%Xay)_>@m5P5&~DXQ*Y%4@~msO&wO*^G!)Qy2syksY66W{d-YIG?M&OxX7Ehh|sC<;?nkwRrST>N$rU9s=FwfZ^v5*`#7-eV8PPB!rgQSDH zgUpN03YKU_@FfUo$WV61P1Oq4>`mM%(#}LPh+2=&49-O^(jluUBp3<7A5N#vLen*N_w&VR;ja} z%V9bCTv`Y@Jm+j>K9ullg8H|^6HMmn6qY&ow&=@zhJ1k$Ih1OBoFJOy%sO)yL64(6 ztD(phIwAu}=+*P9Yekmv?vZID(C^f&Z0A0(-91}$L1Gp+QZ)6L{ZE}MC_lnXG*q)U z*{DlH8Pbq)aMpZFTE`MD1c*f631HVs0qYTw6wzP~NMRfrc@!2-2KMp$T;6SlHzoL5 zoYhV@r|r6(N4fazkI4v^EzHa>P2)J{Rz90rq}?-H3ojRQBP+JHSouw&teZZ+ek!#U z=uc&wozVt*AK*QU{pO<}gMT4js~TqXczBAftxc>wNoC#vx3u2r%QVR`JBb=cZVcR2 zVG>rsb0B=2K zncsZx+|uhlNJA(KvOqSL&brmWSk#iPYOK1bYSw0uKOB2#HH7|3HeVX(F#JQPOIbmg z=7i&dutQRS7k0j=W?;mc<%A`gUQ<R23$xGZXuHhrB_413q{ex?SlV8@9n}xwKw0dG4t6Y-urr9I1EX~ z5L$g&1Ejio$m{WSfFf5?rhaZ5xnJ%^l*=o5*&}Al9Nq?_0HrCQ6jHuiwA4ep)PwrR z)=s6|Rja^NuKcaRbfk#JWG>woCw{jEk45x4v~{-H9dS+<8x#(N7+#Fsfi^&tRsNgR z$`|LyoZx& zMYZNCdOx>4&*HKf+r@2qdqSkJ8WO6Q`lXt$l2247hOW$EKQjnr+lI6-SA=jVpFhID zx6cg?D2-G*sqNim0Igm$Vql_^C43R}+&Ej<IlQ;}uNU5QOPE&;JM zWBq*3k-Wo1)G*j$TXF^LUTYTW{6OWt42#wq0@9Pzhz|#6+w96 z&+u3sHj@OFjsfyLh!c1jx-{5%pb~0_WQAhlVKLpEQt&3gUkQr8gfb+#x%YnRX{6KYjaz3`|Bp?W%K6nCj9Qukv9Pg4$h&OSL!;a35FdK)0fok)(wGfA9*Cwm! 
zn-wP4&EPekIgU4oWrUdoP!$1aS_KInz7@CZ8(g;X7rh!>Gx`TmB8^zsP9Q2I+6JnT z*_!w$HGhpL{Q$k5Jp}NcfLa&yJbD+SdtIAN$zSlQx*i?X)>}mwI3I6A#+FsFwAkKP zm#UYYmnG7kOCLXV>&q4%#7-7iV~dwI6_+@oe@A}&ez4vS@n*K&$JC|$h(VoQo`0o~ zq#khzwjb2e(V|!NKOIYf@h0e|i?~VJ79`fd%sAPZn-IWB=U_rJImxzy8|qBtSIMJh zP-Ag(S>7y;E-43|Ko=KTBhp4Pugtz}OSmAZoEP=BP@on=bwa%kuu90TbeP4%AgS85 z0g_EFToZMU-c5cUeS3JT@)CEIvY&9!x@egtP(_u4e>{D7lULBs?jGGf(LGCj%c(bj zk7o{J$0dWG??*!Gx9F9?45X%$A(*SI0i{{<*^^E(MSs81nEI#ywY7oZ&w?jH!@xi@ z4zge4#A?{DMWt-9GFfag*Ty;Gap*t1+-v_3Jqsq0ooFeyJU?f)Wc4MwAN1@_+u|jy zwRx^wHz70WSok%t?wC87*p;I}sK|nqhZbX!>O^ENTj9BhX>!fJSqR zb(W2!hnGnK#4pS<2(gwg>b`|tWF#%C4bgZM=pdSA4E^P}pt7RgW@1+)#lX?M#g=qM zT|JBRGCX*+&P!tLX?EL3w4V1iw9GqU2 z7n2@)cY(qwJId@{x0tu#i!0!1LRq|}QVl#ZHrY{|udGqf$~V|) zbA>g8tg$Em*mH^37~V0qAlcm?Gj+v3@@S=ZTxW0_WIzU$O%ur>#YN?aw_=c)Uce9W$H*rDH{KPE!vtsRzbJ+K$dr z(2Ty6;B4SQw#qkTWBWS5DbT2`f%}Q6ibOAH@B=<$#Cq zuV85Y>%yo^?%A z>BJL^QQ}$FrSNmpgZ1TsU%i^BP!sJJhU}A)4vtHuaVm}mt*6rTsrv(m{N1pZOSo~N z)WU}mu6YSInj%mm5fMb}i#*Y-o-bz?j+A~ci>7vqTxF)dD9WBTmkl3ZG(3R1_a;&* z(iZVZ7VB6SMstQ-m8$dM7r~wwJK?~&)2snIpg_l|23OByMG$aLVm#C)AYRay)`HKF zdhA(mh{Q8BdBEOh4fxgh=?h_Gok)w~^KK@aOvnaF5x0Q!AqWJh_^H#G$N!u}Mv57b z6OL(7Ijwl0Z%!O7ukV$_X@FR?#JKb}gle4dx7gzwS9dKKEWEKmw1YCL)C1@9)~&27 zU?6ySOm|b2Kd@xo?ubP%-csM-ag@3I^0@JU`|! 
zH+76JXR|RgrQ|D@E#dUH`kR&8Ga~_pt4zYA0`{^gs|f9b41?6LO8YS@xyi=(*1?CM z&ZRB=tEO?6?+mIYj%REn8~jcs>kOChlp9`9;HX~hJBHTGPeQYy`t0=NkHuTk-p_dN zDyO}IxpGHW3aqnV@2VD}t(fY+Ulvn_^5Xbl9MS=W-2b|b6e6;mm{D6XmPj2NInby= z?DD{7_OZa4$RMJ2m(|1(M}c7KlVsEkQE;{ohSz)F=l!1V&u^YNbLOnG_v~0}?{(%p=ePF8=lmG5FT;DQKKvfWLwRz> zwj@YiKFlqM7e@pjIc{-lJ@}PKe|yc~y=W#)Iu}bEXUhpiiFy$sj^NSUw8OY!$k=^7 ztS{(Gm#C7Y0pnx4OBa|vUD=znLdm<3GyWH|LLn#i4@IssUn=Nq-Dwl7m@V!abM&h%nq>9p zthT=lq03X%NYHngrRtoa7nbX`w{^3B=2w;dl5D-ujhp{b>Z~wt&i(nVK%TH??u_Y( zbZGwUzV)@AkWtNTdMACz9>NNl%49Jxbvw+n9M{t8pXm!cIGpP}5KZ3S+b$T}skJv8 ziL$rw5o$`;qQfddncgG&nt{aM-$;|H^u?H|i{Ey(+KyZCxu2e&t>CQeOC3Yh+iN zc^$OnPB|CPLq6XC+oKvJ9^@XR8Wb7SqY7|c9>$*Q*U9lLN-1LucpWV-JD{6M=t(fA z>+r6oZTh>0_0#I+TC|liw5+23{=D64{AS+8p+*x9nOV7xvM_cAq=4bW>8YBmk{t<+a(_P^7ds%(SoS7ilD zv=|Ob9p!(NA|d$WZ&CA4e9QsnD(G3Nd4oZQ{85moIb-R&6W4!o9N6H&N-+VE=CsMU39PH zwCCG(n6vQ$Q2y?mOG}MWaXw0j1i2W;d)-P8Ut+msP`US*m$%cEjnkyrH>9zspEVbuT@_h1ruTPwVKh%j$o#m zl4W;M?#p0&^y^E$GCverAsULoG-;+9UY?j(?kBrUq`k`%I?(OvW#JkAw=Z{ET z&tEk@=p4fB8;8ukTg?@Lz`n#r=DegL6<5UF+D%~dbrD5i7h9#QG+#s{#g*Bni z4;sFrt~-qCuKM6Lt0fQrH$C64)^fEx#1LbZC{UFm2x)X2USHZOaUtQ+NV`&IX*iuz zAb~Uv^9bO11-vCd%Yv8-dMhucZn1LiIVE zW<6DchYxl1Dyppxtz?RBLksy2fx^;Cp1(NJ8Ry zadywqF{jR~o5$ucwa+-feIb|YRByZ9=a@~W{=6sQ*iP5w+icRwG_`Q`S?-7{YT-!= z9+32tZO5!={5Xbs@brhoke1WWCs+%a=XV+pL*7OzZL!rpmEH}mm03ei$0a$bTo+6G z0X;g~r*%kAyPF%bUDTC~-fUs&w9cN2{lQ|sYBmc$bI zo0<=4@iu0<{vU9NkelG?mjN1BgIa0#YGCPo3;rpDU#lnD!D|XX$ zM}sOdL5I#&XgVUbAsRQ~Lz1KbnZZ1sw$o4BPWJz)i`CJ&H6y*E@ zqaY6Nu(EVrQl3z**!NLf^K|81h!le)<<#Bh(r+-6k0C0uBuJIsy>A`Rm@3rhXePlS zawa*M?qRUI!jp~k`b5PT`=x_*b5dUDB#5Y%RSos}g@U6*XPUwL*zfgBMtc|vfmz=r zTeyrH?WA>SD46c5qhw{b9tryx)9gMKw$u~u$=GCAE?>Yf2-W%+A8mT@*~0`8k&Z(6 zr1UXNI8ia<9Xb`N-qHPYJ>5ox6+BaaU?$qWoY;e^T5XLmf{?{UaVB+r9Xw@%5L~^J zzBUUU#V6Vn*OsSkO<@~T_DNHEm%D}Cs%S<|POITkR6Nsh7|ygQ7_8Fj4q1S|{#LFVz@lnH8w+RSsmyS_dQ-uO;v@AZEoph<$iY?yXngFb7xQg3KWa|6 zigD-c-c`0qLi8p&`0Y0B^z5eXE~^ozr>mE$?NviqZO_td(Ni0X&T?>limA(>zP5H-R6P21z 
z2!FJ+eE+0iTR>{SP^6mFpTP{jOI06g;C-k6Qe&(I#DcEoysO1i0GuS(Y~P(R$1+I^ zwvEtO*Y?%;Oxu33!<56-Q#3)FAmq+{sOs@#Fw$$Z;Z5~tXxJL*Ro{Bv#cB>omuJfh zv3dj%K8Du1VXszB79Fcd7ENe~_?Posb(YUZUX|ITVDu5?d{7_JZg8b&MqLtMAMnXh zi@#LP!62@2CCjF92kx>iWq&HRRUbEQPVTVyl3T1

?d@ zPv1JG5|)=CG%9q#Io>!(tX>|wU~nzzBhRW<()}ujCF^V9!F}bUbYdDGfAux%1ENU*5?6sUxIXN-v zv&d@icb^8&1I9x7I4g$*Qm+Cu1B0M}J5a_+VsbjmfR`O**Z@3&x~jT}y43ahPOZfE z#Gj}9I{YI2wvm|CqnUEqCsZ9vzlI?N5C*5`w%#u_Sy{4Y!mVIu>cZxFr`R(9wB379D*%h)>#ZNh$v~}tw3Y<%Cyc(^{V+ZpbeoJt9gwH9kzYM=yF_13g`Q@l} zBC+Q3A~!Wbw~)fq&V6Us;n0(6qEd)mN>V7c8O6C>GJO{Qj6^doh%r+^T@&FQARrGy zzLrm3Y!POhOh@VE$w}ZMS(zj7~g8r zmv8X5m1bht?U$2s*S(9oc$`Z~xXdI#jE0&_n>P>(RL55RP8nsHvPoVh*KSoosDrzaz4e!hrDfR&s)ATa(FAY6aA>bk=QLcQNeRK zKz`D%9sb$fN{gTGEK3{5XN5=6JEnlCJnPevw?6zCf(4_sYtu|-$}?Jj%XF;-bzKl4%A zk%I}H_eRC(J##Hty-#(y2WxA=8xAycjN=V%UlA4zC3WnOu&1MQMKnOwTh%;M+CuQ; zQ6(g!PQv%#*98FmT4ouehEb!L?a$3dm!!%U z0LcLNV1is=G9f-J{Gm!X@MXtJtB8d-vV1aDZ8utckMHTk)d<dw8!$@oSGTL{$?JDf0b};O>(|SP-G|s^yZ5=GCqPXj0SE~7Pv zb-M%L#yc5hwfDNRshbVi%`$th39l; z*KCr+$HM922zHZmt&SD@@8!~3-W`0CAlzK8{X!u6|sl`s?sX}*hs&%DKD&2^L zQB%)>p~eA`j;|Txe8?7?7qo5m616UqeY+O;^jkTDLemQ}flA+pJhp4=hwYx7?N6R} z&bZTKs@d_mu^x)#dp)@DwlLk|dhhx1{Jnc`Ou+<|nf^FWKaL5fp z>Reus&9oF^F_s~916uNG*s9CNsZ%Ux5gyXkqOuZ$L!^eI4$pFqW2?Wd=REI?h42K!mqgxm%!|d88gbtL@SQQLJ6!!{JM7Sc*Xs%(C{@^?Uhq7uBh(zK=^n!QaF^ z`z>jVTnh7=$h4R(4M!DIx$3%Qg{(yP+3mhQpq$_@q2~P>FtWLe-zk+d{!KjP-Pr15GF8Rr^3EnDPZf<& zQ+Yv*n}XMl)?>50Z@FU3J^T5P1xnQT1Y2Dle0)+dp)uZE0vQX=V-_4gL?`MTBSm-? 
z35!j6hvilzl1;ytt&r|U^Of9P4Xh;IeB>!&dvqb8U|Kce>HLZ~rO6mMEkZ*lN7}rt z&k$+L{9{Mno?_*|077?#Nn5zKc}HM1R_~eAm;qiGX&KfdtHHTa;ACKl9TNUgl)yDC zb&l*A&oQ|j%=O+8b&AKgzzM(X4T=x(`vbOPhJ6AuIJqqSZx@lIqoI$?TrV$Rj0P>n zDkFx%@-c^rkVCGX>>33dd&#YUVEo<5lgZN-8#gT&7%rzDS`+nLYp za@jSUQTkm!!c(w8%KhSy&G@SMY`?29rfTX4qy`}f?LSikBsCt*nDA)isbbbfHHURh$H)@9g@dg(1nrIPmYir$_0EFqot z{=(Vd*9-Skvd?8o_$AB!Tt@^l8@@s=Z?SePt|PDHg)4W4dMBAQJ0Zs?;cKmS$UGS;GWjRTD z)=FGjYxu*nymuu6*Ja-1fb+fml1ID`&fJ*BMe9&|#^35=S)jx#GEpiQ`we4%lV$47 z&_}c&i|1x4QhqH8^)0{CW6Biv7Ayw!8%F=;%!HAlk7|J!kIJ;4(f0~S(nG;?sLT-n zyx+sd1j-l{$qN7?%i_yo#d{~OH7LG%qB=ibmcP|NToB?lZmcnM3@Pk6FYb6EI)AUQ zw1A2GDi0`KY0~t6CJXD$}cTJ zDH!sSD-Atl@5q1afTuasY*|R_rJ()o7-9;^-*GP7J0mttn0`j4$>)Vp+Ouub%2MDU zq`A=Sp&f9xVoJ1?@=D`v2^Fi){Cn-wGfT5?-%|n-avT^nC5vS{uhi)iDF6ffCoO3{r(s9@pUeY%j04pwqz}zW`GW1U~fUUc?F)?(=OQH z?CBz`rCDum^i+D)1=eWysfWmR#IzoYXD}(v)9-0M=dy2@dc5hemwg)5=VLE4Ke{X3 z=7RNNqu<5Ba^L<*1N581i*FS5&IFV#d3GZ`3aPePyH@u4bulp!OFYT$h)R@$Id~U*iR%{eT=`3| zHxV92evhCnGVgBd^&PpoxU>HNq;43J zKhQNUAP*ncUx?HVqw*&f^b;eY;rc7I;r|-J-9W9^ej0$=xX(j3-oIwQi!r0tWy_1s*&XWSvJ`^L3-78v#7tHp~$!c9c>SxNM zoWoc7LSV5bN7Lt8m&b`FL<%xOvn6_s6eE?v&58Xxc%0Au^0gwC(Q0_!9+?ko8zE%&+6R-Nb|#0~68tieA`0@6kPIoLF(iuYs3x`dWGqN+m6+gB&dHDJ zF$)RJ%S$wS+J&(Hu#_9+|6wU$2;eU}xuK)}w3EN_vA^N2zj&{oD4nFJnv{<6|0gKj zEtT~vruAnZ-NX_7yEAUP;D2N3{-=)lpIN#;D8PS`i!@v}3i*F#>3D9Lw;NXNPmb

9@a+{%_bi9su8ObR8JZ*4^-R+~7Z%yT5<&XW2KOKM6Z9-_Jb+ z41w1VPVxb`ZppeE)(*tY4L=6c0QtZ)e-d{P5L^Z)^KLc}FE0%ke*T8NyP@&Ga1xLQ z3~w0#Km+Cj(SUit|H0(l%!t1#o9jQKoC0qu{68~!zv8{@VO-cB0fb_pbHZkX5FaS{ z%CM}tFbe^)DUu{A?o5k}x=#6Wj@7BOilG=uU0l?qqcqB?cRG2gIgtQXy68Ll&5d{6 z(@z{GR|@>2csjzP2QR#?IsIAbKIVH%mt;cqU3UjonG8tKpJ=-$oQTOGmm!O2B)+o)gl2REd@r>;L-9Pp=- zZyDA9WswqtXa)Or8q9#>)Q`RErgyseMf0;$*v{T%3nF}Alt@N!?xL;XeDF?v1Ic`U z&PtHY;ZlzJ1?uUCA?)}Mr!VE@^e6Gd!KXCI#Gj`x);8>{;{4fmOWBiFgld`Uv6rpz z68rA!r*TCd&?5*pgYeG*AHJP)Qz*4155p|)k;Q)|kJcwUelnbrbk{tfM38uP^}BBA zR>N^6WE$b zoF51P0ss&IgbTz4;o@cma6JS7n13zw->MTcfn(?nR<{+uZNeV`sQ>20-JLBg@4+c) zzI%YbUNqd08-m)B=BEtEeKP~!o~5z>Lk0kWZn)~3*Do?42;Ou5lmP%YVDmp^ARyrP zyj%co-rw^A!Mq@NKmH>xkQ<)zHyHo~;rl%ufRB#{KB503FBkj>J|X=k1Hrw;?=mnK zT)qEH#{~~#{hJI3;^TuG^FPu7!T)YA5DbBPquws-27PXE yV^=q0P1V7{l?HfYpFc;kjJ>4;oXo#HzV)Ie&aStX2;}AlaNT2IkWiAm_kRE{|2NkF literal 0 HcmV?d00001 diff --git a/docs/images/code-ingress/gitlab-ingress.png b/docs/images/code-ingress/gitlab-ingress.png new file mode 100644 index 0000000000000000000000000000000000000000..1401fcf9f256e40de5ca40b024b66aaa2a2fee96 GIT binary patch literal 47118 zcmeFYXIN8h*DV@~GzC-yQL2h`0Yw4nh=?>nq<5rAmtI3vQ0XAOOHq20UKJ%EP3aII zNbjKs2qEWQJn!@E_ncqny7r&_V@DAakhL=JImaAxjOlx|XY!O3*C}8y7^T9~$LcT` z88Hk-@QRECyzRkJoTgTbD@kB_%fi_^V|K<|pLyp_KEC{#^0hWmEF z$BTNL14Un?m($hc?B3N8@_cxAUoKjX;Hp9VOs>0E<(34b*>(LR^aeT}-7 zj$x8&!YBLmJD$2MPP_CUy(VCN7i{sZ+vlZvLmRRy@*4e>9%2+{cxJ-)?LAi_)Ntb^ zSji8c22}r1DcPkM>yw4Lx+do%t{_)f*1E+=kD{vX-<)n`>L~noD{Y-}gPD6MoxjWe zUh7JIt-Qyvb8EeZezX)YMy_K9Dtrju1^q!oBxrB!OQ&7Fsw?2r9kY~kZinDXQ;KbJjraQ?+EW9OfAjy0vI8&B~5{Z?|u z&GX){F&7vsw#EBzJj`{IQ2O|fwyJ%7*+*ZP=;oFnaWdZfOFShdSpea(Gzll6W)`_? 
z;3WZ~gf==6*wq;VUzv+zutQc7HbR0=@PMDktiwIev^qp5Ql`PsclJptzI>ORm zkAkpwN_YQ={2C?@HA`IQ51kJM_26eIgts{LQ5#;z84MIz%I(;{v4aZ{dIUU*Auo zSZX`kG6I(iRsGJiMSCbDe&PJz#4Sd`i#NYZf4pS)CgSxaYBu`!kBB?9axZepuf}NQ zGOWmM-gsnAcNFkF>lG)v;#G_H&QFfA55H&(X=&Uoq;Mk~4qTO+l8wlD{H6cZJw4`< z;2P=O-+{}oTdFlk!?!}7vK3zJ zeQ)$bXV(4Yx$P_4Bu;6Efsb!n$JOOtR41|KUlCckw{l~JGFf?q6&V}cwKCs4&%UF% z^J2&6Zc0)Hy(;e`yHuTOozSu9F&>JHPtX3U38^`AE_@XHDE3j;si`hRL|a_hx5%^l z@Zt9J?T6$kZ?fNnKK}H$;&IgvmG{=%357ClSx?k<3lLwl3ux}Y$yb~kIQ*Lam1N?< z1RImA)gP}t5${Vlt7DiUn?n5kguTR`gyMuIRxWea3zQ^LF>RctX_nyvl8RhUopNvH z9Xzd8q*Ult>Q+E1$}3(~tW}^^^8aZ1OQiEh_j=;WN3WYnNIk`DzXn9&6?_uz`HacL>|8 zW0vh#;a4qtfsSuU@hd;|!%5=$N!UrqNkFV93qtrpyh5fz{28nnwj&B7Hp|${+zp!T z-9}eOJ48ETI^@Gyl71^ss<@}_CHdZaAV`~3lhT(|ko-1fI4PoX#ZJ}zE;i)h#AKgu z>XFwjHC;a43f<>BrH?_XeMc<$Nx^TX0m|~>AlLaqp^J9 zxZ9&OEcCtRkNbz5pYP|==9?-7%W1}{k-aW=>h$XpH!k^bU>{)#NDG5AW#ijl$kQTT zA?V~|;@%Hhe(vsjWu?7@>&p>8$DBF(gQh;I-p*;o?RNcfomRc1%b+_=om*r3Cf_FM zBx0nnOhpg5LLlzROe&t?St;j|4_&uWx^IH~YwL&-%b?$#zM97CWXo z$lqRu$ed-FMa@gGn6Ri_XT2~)J$czv?5Pl^&=($hhIN)ndhyym)2VN6aW~j+oV#&r zO?Rzo6gA1(c*C^)ZN$e2Q-=OGpJa(1rOIAy7yrK6zWCki?}NWLB@_MM;qQGH`o#N^ zeSo|2zBBhP?!x;vq7&T5(tFC`D#~6xcD%wh+y`!HE(Z3KzD@Ec+lP5?0^Za-HhB4J zww}#K-B;bQ@};x;j>(5-iZUMrhHvhh6h58dex)2dF!I?gZ!l5n=jnysaKmeJ$s@_b z$>^$+sy=fia}EoP3;TI8t1or^RA+l`VERil(e3=(@&~2|w>KEZ9jow<3m#iPTlqnN zA!4%Vsr+79ODfFVr0ZZ{Lt#`3okX5hhJA4y6H#w_GY9UwO<<41_jO*))28m8ovgd!xe*v8Ig`fhRiXOo{jp&MwyLuj{ARsyKS= zrS8?@EqC;>0V~o7rYbQ*k6yGpb?LPx5pzM;g$!Kt8paFbNL|Ln%(%_qC2;*7x9`Pm zlVj8G?W0lmSnh~VONKD>_})CtJt;UDJVsUfIntz1@P{L#FjBi-2cAJv+dENTrRX?z zGvhK7Rk>6is4%ExW;&(SV_!~%btQ7~U$s1c`th)8F@CYA%76THn4jW`N()_+(Q)Hp zL{I9RV+87;$E!~Qo_OeB+OpN$gQZ~^V>y;2@lDg~)ce7wu9VK6rV7;M!91`|z!!LGheYj`dWzCiL) zN&Ybm5B}Y9iXTj>~_1c2< zjFsuUO;5CsH7}r)!&g!82da|aAtnB>bt*g~tL33uJ0)l)#`CVnoBB<3=_c(;=H?2E z@2Z-@5nQWFN^*a#ma%@KI&z|VGzGyZq}A(xprD>>JviCjdCLPonC!nz?ha?hypULJEI1-I#w>KDo$oorAKoOUUH+PQ 
znjkzw8>_jKGtt2}#Bj?w*dt}@*|q|Iz?}b_f4DFqN(kS$=zcwJ}OVb==(pR5)Am$>!Nv0ngWd(V7PBed>q^5b-6|4|-e&`wV&s|oAoI6=WlZc++ z9O|rl0iA6{74!@%P94|C?Y4vO`=5$@(&&xqJg53*{e3ZS!?z5piZ}~7I)4#fU5A|C zGik+KJV|QqqwX`|of?w#8@l7V1$S@%Zt(JBi_TS1N3ASezh-z=+7R<8DNRvbRvRT^x#anyk(%*KYkKuTI|l&kDMXJFFEV1E1{Z zF6x-Sx)k;{rP$MJk^0fp@1G%YHELf|8<5J=*m!Cbv0t1FzjS%&y7aeDE7B>d8ei)J zZA;kbUGHgQxk^?4yqcXGe4+12J6Wv0{}tY@Yx^72qIAHAXt;hfk09`81% zh(ByM;5wlOX~l;rwYg3wV=tOKMH10)o-|{paUqA@lVyD!&iNlRjikOlipfU5Zt6MY zO?LhLs-~TH@Vs#E(aOgLy@I~F=%ZVEzobVgX2}Qy=#mZyE?rQ3q4;^rTlleeyrjp< znlx3S#Enq(?pLmlWGN~Mbk{XHB;UShw&8Illhfgym9xAuPAl(4SHwA#)qEvhrp#1s z^o~iJ76*Au>65=Lr5+6$Ig>p+CbM{G8g+ z+K4ZpVN@|X;#KXNt<)vxa}To&#>C}yv&h1&YlZ+ zY%`^jWl@y+p7oQY&93F}f)*l+<*i^d^Q^PRs_C4|x7%6uQ!joBiWu-bc=3?cpv6X~ z;M?Pc)5mIuz&uah7ad632OH$#U zP-X8oqK5XKfsE_&a=M?KW^2yrlnx)Hn`$#z&bnjO?O+Pbj zxVUkj#E_^zk)`B{a#S#ZJJ>sB%TC)TN%>i^M3=yk2n6+RBUn`s+s}!s`*WDbLjk z+!y9TX^QM%8B;200q(@>YM+Vp80IgT8U_bMOjjtHjrZsOTBQFalWr5Jea<@GBACzX zVtBU3W$ug);-DA(NiCt@?y-~nz2T*ccsZ^YPH#q2Z66-i+Y#{Nq*KJx$2H$+l4R(9 z>0urA0w>Ekej`fjza!o!eS{sI8tA%g zF1u9vgT2|Mpl$uKfs}QSt1vN1_Dq9hvYlIj%0$R{Tc--Piu(@@|9VUfU1}(F6;NOJ(KJ&X3BA@BaUw@XFuspo(p`KvPXG~L&`Q!EG)(WB7*1EJ|=mU6nE-qGeINtAxLrxEul=UE3CgG1&$w&o|K z7Q}<^f_pcGgd>MDw)j%t5(QbW!EEaM$^^`Rr#)o0A$^nmC}eZo@ak7as)jp3bgHi$ z`;}eEK76Iv>l4tuc>is`G?$pBT=I(mmx!N6U%81*0uIa(@njH9F2B0e#YS`+DxYB_a`!JUFu}^kDrH_Vr;2xo(e-{w@kU{2ebhE*1?+d zqt&4=^8*J-VW$^bhlPHSEH8F2D5ma6PFl|7*z<wcw{PZKs@{?^foN?HuBwb_7Tw}>s@mB$!2&sO9*3&5i${zpc|87 zyP|G*b>NMG&~}I(KO8Nw=t{mDJIcUmECSz4dcW!OINO->(%!u{xjNDce=c2gC9MhZ z>8FgL*@=CU^DtSEfT@J)b{0WZuFlsT{Rf>KZ|}X&`kDR20p%3c9`n)u@$uD-7~)Q) zk`L8@!on0D%V>B`tmEJx*)VVv#?L#`qoS?QvLYY$Va$|Lj{L&NU(E={TUi9Uwb$8Y zQd({pY_ySzd_doA%$B|VHOAJBbT;0_m+OnzNVbanhd+0?E}vtsOS!yEqey(s)lj$X zcDPRRKy|h4Ed4B9MR)XX@A?A`|aZHs{L5EzOd?Fd0ibRAj|(g1a#fQz5jgzeKx_xT>P)+ZNxWk{MXX}^2d+<>nV)p9pLQ$>%o4J zoZx>wgDJCKCj76bNz!b@|MgTL@G04UJvAZxzZd>LB-CGRhln~WL{|U%EXyh0KBs04 zqxkRF{?9qz#wQX`Dw*6$f9PU4B>PWV(NAnnH9i23Ra02f#k1Tq`IIi`bLBSlYqWEd;qTwrYF%)kfZE9EOXk3vi9JYZ+ 
z#d!Ev=nM@FeYCW+)ZmW45@NbpBXe<{89BLx^+Oy-JTaR|^4SoKG*eMg$&>2u>2Y0e z{qt%*1Ch1Mw0Iplqa@{ji_L`17p0x2{=D8`q$`d?vqZ~6SIfJVyp*4h&&J8=`G&iG ztf*h)Cr)8ChaXPGPjV`FU$YPg($Uem?#wGFT&EWhM%*`!HAmKP$A6d=NnoqiyK_#4 z654)xWDzpfRv-Tb&Zbs^iWa+~@K80R#X7_5Vhv?dg!s z1I;4M_->2wniA)F-u~T@efknDr5byFC9+QL#O`jwwRQos(qWqqb2`;}D!rC2y=FD` z`Ze}NdL`VEbNByB$13KjBl?RT8biTHB1)fALw^Aeq$+*LhfhxzoV{eD25qW`?1VIV z>1Yv7^5+QKF*wbr9K8}7OG|c%tH_#BxAhu(kMMfEl0qF`8$Un4;idE7CzxQf{)IQ8 z^yGz>7y?_tHI$p7SH@?pqG2cKOnQ34eNikr$ysWgU%+o2UU%MFUZn+RT~qo(qq4GB zzRd@^;BX&2xQ)U*5EdSFGOpH(w@23Bp(07=2bT!g2>ll_?L_0qOfC}zu|6f5FXBRF zaxg4=7<%Yl!?`z>#Brz=PB|25aOWW&%yp7XUA=m>x~}evd*f6^lt|vm#{?_P*x#pyjr?Y-ZL>UCn&EE?AQPo#qXja2yKW~0O< zuaxBE=hzg#K{LAfJ*`J)=U#8d)W170PgK>rzFz#{=cmo;zgFua@7F{S6e&>m^x52|9$bNo2LqRT1GjY+m> z__`-=949zWMnsH-ZcdBqzdzEB;xb|R!aAze`0Txr1TgodJEAA>nD77g2Q zkp)F&`3sHlDP%0=3GNHuB+p7*Pio+y`Knj4XC=r!>ZZw;sPI1wLYWt00BzDO=pyxk zr?I~lDh)-BWCjAJuyC<9TM-BY0*3+!kW7^;Px3%baK+qKs=(j8E|00pka3D@q zvcRUg!NO{|v6N@jjTTW+YEb_Pbr^yAUDYqC^l!<+Om3c02$GbHj1fbRsXWwCI_l5W z*2AA)RitYS1FUHT96oXWlkCk*axuK!0~Kb7uP#e{o{Ck6qOGLJyLfFuM}6>20! 
z7F5BlKxL;bwI?f;OYJ*Dl#-hI;w+Dy7Lt)bFh9|Yd40 z_omfb(=Q^CSd!&N8qyJ^GU7J?D7!1ob?MMI{=H56_n@82J7y0@S|Tl^vjH#(?Z_L}}sv+?Zaay8y)7(#`!hqNTPf#NVwbwn!j`;q`JT^P9O(zRb0xPqe9_Hj!YR64hf4Gh-D`aa?u?qA;#ca*zB1`B72 zyFwhUI97LW^+xe{1qa5aKl}Ta(7+q}Ho>v9axtGcukWjk_SD#q*GRmU@cR!++g*Sz zeIC^5&LqJ1Jaq*Q)eO)!V(pQ71_m1RAt~wU@gIB09BPr3XSgaTC}=%{a$+2|d2ol9 z_lw%{1*lC~Rwa{MRLZ*y;3we0!|ioVCdrx?KAU4H1pBo{&836*wZ`MaR*|RbZ6wQ7 zJ*jkDPZ$yPf!NJ`Et@gvizx+jjbFwf457B3Y%&@ z(YfA_u>QU+^r^t8?>i*U)V|(8D^GpJi?=GhKpNz=-os&d!&+d;(dX^NX@8^Gy#bo5 z`&%xxF;~hT$z05p@gHo-HRg%ily)f#s9y4tAR728voeJC#uR@1-P~j>_jY~Xosk26 zp+NDUi`N8~92IZ`R9r?>oWWPa;^HEk!rMot?ZNxL(NwU3>fJB~Iv5p$_PhqBq`Re$ zsWqFX-)Q)ok2^1N$(z}#(b)#O_qo+OJ~!aTIhm|hJOb3>D*{Irj7}m;l1BwOr3@-Yi9j7VhKa`|5E$wpzx;{H#`or zhQc-ae}*t-<*7kW&ACpJgx$h(TbSNubE{LObWMVu{(&8L!bPVh{3+G~S;J@4B(nPA zACMap01&5hb>F-BmRe#nd}qGPVh!*ys(qKnDWgB`unyy#Kb@587(K~#9I{Kl%xDzT z>IMIjg{z+vz9LlmH1>d*d->Grz_vd-$jJNTli#9PC>vKZZeZ@}16-#AQ*Ol6v6Lb5!r_#K4C?|2UfCN9-wHRev zjFG6<4vpGi++Uc!e7f#Ywt_`9@hN!suk*o|s7qID@#A9u)Rr_g+N<9O@T|eiD)@R; z;DVha)(^f~RVlGnWrt)Q+j%9ScV$oX)W)K$*T#)>&4-BteN5$ZVs|sNo=QFTtEq#B zajITP&7up@SD-4%+`7@!ejA^m2;9ZwwTM`lRDZj1%bh9o6w7!X5T^57eYHfpjK|JEp;pEbBUNSW=(k~Ls9P7q zCuWWw%EYg&6%`(o8(Lt;sIeINGHEwVkDFQ2n;_FTs+yEd6L^MS*F`UK(^OpJKh*ZUuyV-YR%`7(izI8UZ)Rx6EN!cx3VyCxwvkd$e(t8~>`Ntz2f}1~4 z=#jrzcXvA%kW*&q7hi9j%yyR6Ca=L(Uxsb2g_hQzl7g&grAUtx`@X~h(oItq7vZ>vU}S5 zfi-_Lyr&oA_wBeT;N!_Bt2^@kcyd5BH0-rypj z3O1ipnHrw$_FP#EkoV$-??uw@Pjg2a9_4Q!({(-DuH+qJH%mY6z+`fUql- z#V!cP3zPjh{HSS1pD~XS_=4^@l7o|zt7>&_~j4No~IMP1?xw{ z)*8AkD@bI1Kd%uhWAvDIFRI`_HO0wq<5lpppZS-Ms9Q@$Gg^w#xIgJ`01by2 z`i=E#PCol>1Q_`oS3-qsA+^D@L1Na(fd&-@^%bq^T5z20fj z|C5`r5ZSk&d38eZvLZAVYZZB|wFQ7>zImO$D9tb)+Pl@bzZiyIEpap&#UTzpWBtLctZYE7nEG@> z!h6oYB|r11FMIEF6P_XFCksVeDav>V)Tj!m-e=-I52n~1CoRKsEIVF<^neCvuLv_d{LQr+rWx?N4^*To z^*^^9Y*y-aTQ-(E$$_sVZu&TvO&?~8Ss8escWI0zl0K@oR+j)wqcM!9>Zzb+LDt)a z&n^G@{;35cKf2$XSf*6V{;L&rEY?uq0}i_P0mcnXuz-jGMjL4c=CJvRhmoL}0`38L 
znYpeO0@qxlU((1tmWLKU(Le5M{ji@*ji-SxVNjZ;pwlzakZw=V|fThuf>`nHomAdjEe3{>Emf-t+Drdw&%AD`OY z{bebB+JElImT7~_yB?RpBzoA=hp`W z8w~6r;zoydS^RvKH~v$<#TOvj8$-dM4W)@FVeC>Q>h}g_T_=IHd`ayF*X}83gkr5< zBtj`BK+3ux;$}{v2GQt3n7Z_A@@XOq+`JY-6Vc<&O^Y?qGX;}3@RKvwzKp{UUIuCV zP|*(PmNML7SQjQg&;EOIs@n(DJRRxRS11oF8GPdBgBAai+SX#J^Rfz3t$cF^_R{_m zGz1MB^_sCKEgTrAm9F*sVf5sO1DJfYYx|o_uzG}Bi+4AT+^czy>=4sjH;Nxi#R;%WF#SUQ-_qVmw=cs2{`Tp7wZfQ-eBku988;eUkFEISL)`>e&~dP8OOAvGn}Q(dd!_k~w{0Ni17FBK z@15D`^Anv85VbG8E*rp_8?^hNRr%$Z^F4^iu#aN7}3S0 z(0lfzJ00m3gvXZonCe}H8Tz>2Zc+p8xAm4#5l@%5qK(C zzjlAnK%a~QR^e1{bQ(aX6q_;GSZIP5=s7(Ns(Qh4ie)3*lO;d-TjG)Wriq*6s&g#)&IB7jQP8ypaK zflQm5%cNATw~_je_4YTvsxqJ-j=G7JhJr~I5yRwyspx&^YMf|URlhzN!Ab_n!9e?k zBo9P^0be(m!nkRN*Y@|WoA^GC(cr#aYp-u3lqXZqASkHL$WgoIR$yRsn#uqI|NIKj#saZx%pmmj$fXinhsM$(?`A%P(-q=CApf8+-`5X-4)*62kVRX} zj>_j=8)^elqeRaH>5kfV2jZIbW!csZAj+0#t)-fLLkIqOH^;`+<`k=o9hi}ZXS3en zQfrP(z7GNZ+6YoB;U24uThUx`{TfG7*w{*R8T;yhMg#xh=3M_nbE zc>x1O&T`%YJ(UAZUUphyUZp~fSo7fZb(c7@i-<1V?rt|`*pB_%S`&_;xduo@Z-@(A z7z9g2o?X6N*{$;EaoHT@#0VyCTjUvf{)5P~j$xj|me^e`y2MiJyjbF+-Z5pTD8Drw zE%gc$r@IA4(GrPHUyRPOF3!A|hnQFu_pY-nnc%%shOT{#^EO%@GQ5$6ghr}%ZsIgRYHLwdvY>T}I(VRJFA425 zbqoWF+hczX5Zd0z1cKH*DRGG*TYnoHMZhP3U=5V5dRju@@Q8Ykd4xF*i)1DzEAX6i z?SGZ{v2QOkB}LEaZGc@~!1@7>n^-3y&%IHiTt#}?364rcV}TC2rdyGP5&u#vU{?RZ_T4g21!Suq)D?=YEhD~K3ExxE^QQt4xx*oU(pfMzyiFL zMkIG;8R4>RaS(|aZYApoW;I4#P($&8F78dhJ_ZLS2QUsPVI7Hdn$LfBSp89@?Y5DO z<5{g0r>bf1@q_8NpLKIUF95}GpWJMrURwgwHPj8`*bNu~rO9U*Pj5k#y*YCVRcPvog{rDr=T9%5eVCIH$yE271S zuu0)+E22jLVHEs;>+_C6sqiu<;FSO`2aUB>56JzxAXBTD_lfgENGaRah9ll^$~%dq zaw9KmZ=hPYMfsO1XJH1Xt*EyANnVr)bc;+s?KjJsCB#~y>S(KyKi zOJnv!$@Nnod$WXk2j^l_N*BrIuSg|L;vGqwSJkm=R=NsX68tPS&HbcM#tJZZh~AJ^ zlr%~Gs_SDZbW^AU4B}_NwrUE~HyW$^geF0+f~ikwV2thD#dXs1cwZnD4AwA?BfX@| z;AvoD9*kN~crjJGxJu?FgWep+vdKrj#d6rMlDNk{fcvi~ecVfVo$MA!Ph zgK5&F@T)12XO`;JIbu$%{NGshU!ppdo{cHmRL@T~`t%{)@x+0af(mS}YUB1v;*+S` z^UPdH>#L$H+!vcx?4@4eL6YTR;r_4l-mja9^c?L?j5x)*q$GE#k}XS)Mu=t(PyFh) 
zfWkuWVU`IER_R255m4h$7zai5Ho)9OJ#LaOr;f0o!7Na;bafvhdC+jNMgt;4Yff}J z_02z)U?dZSqfimHX4)K=Bp-ie3SHMuGB%nj!KyxHIBp`*VXb=<*k4T|+bA}>ugJb^ zL?_p6Rll@SD*cilP&JtSwpzRv9>HxN*cA=_qr5mJ4Jg- zJhX~?`tmYq<*2trpH+3`(!j0Svz>`@MaUvKzNY}QRiMzZEyn}i(15+wRmK~H%Q5@) zIEtI~>!l?cN*Y6ga*J^8Yp^*pr=Ha4rE?zIznMPC8dV2vz6suZ9AtX=ywI(JHm4oz z&;EW)23JO}rnOL&kJ#mE(#l2ctP+vW)&m`I-eHaJ%N` zX*gui1>6Y(u8Fkj)n`-dWQEO`Y(r=yI3B@!Y6_Qe36GV*!Avh9?0P8g?oNyyoj_d6 zS@6mL8dlNpDp{eD0+$uSX&|<*#{P4y%50luXC&8$Pp$|4SLA?uiW?6v01(?nnI!(% z$X9rm*-achc0ND~3#vwyV9b3-?^f}H0uwdDKv_-QZuxR&g+IrtTVhV7)(# zyoGRj3L9ge#`sT_fC*)|Wo@#qQseKuJbvUCG8+cliK7Uq&gdc@zw63DYtBOTSA0Q*SqT+nyCB(y%EOea>YaNjR zo5{zxAgmXLTJh$VIDhRug4gEmpZE0PTAWX@R9E+Aq$D6!MP$#90Jdgz2VB%VaPvpd z`i2%yy_9OrtaQzF8IOH}*ngIV^skW<(98hb_dZ(UTY15cYFUMg>GHfh-FUUQ)?C$ab(`NDWj#Kw+*A`U?aA3f@DI|xnGr%)9syFw4V3ll()bv zXniaeI$|FVjo=xw=OFuFZxK|&S_c~pA_f0yi{NWhxHdi}?0EC>0orizC@#4*=AI_O zVJb1KCUf{J^>V3Xw8~9^?m3axbeWog0E;_JVq%e-ch3xi^65i1*D$1uyiDfdstg>A z#G!h4NOFL>B(O0Wwx&{nB#LxB883q5W?YX4)e88!LM)_2rLDGnm8$zle9+!Ot1vJP z?gZa+8ma3B-Lq}$Yu(yb$56HvXmAHG;=S@d>=?KNt{W$xrcWjsp}&hBn2iADxcph0 zunp=r%eNXKfi<#~3ljg1;lBLZMEHg~n zp#P7rBmn%sn!0$Bf%cC0uh-jHe0P|D)e;TqRsHNN3I1CZ5>Qb}U_)3ysDQar?oGFhBy<{c+YtW>D87Q1Wg5h?t~+vH03WZF-w%qgvl? 
zo_6wq8LH5IU3Wx5&%kM{&DKXJyQvCf9t3GcH^k-(joxyacw5=8ohwd}Gq|M&+Y zb}v?aFIIjh@Si2y7me4y33B*t`1EVLD`FQw5 z*SVQ+4{*;t$ku|SL+a(KH8jw75m-o;-DGa|Qqc@ju%$66RVmACO#VPY-z*dLvrbBD z?QYAu?RA(?Hzc3}lgSXY%6eXcnXxz@?KD{Q5-^$8e!Nf3k<<*o2CR4DKtPe$x+-}A zhH%g%yEw{q^}xsUzzpqcJK+|c?9LQoihxJXebFn4^sZ}D3!$8k6FIA)&YL$(p0=R7 zcbilmLML?v7v@2W;~#akFnunyn=Z>UjrcEq$A`oW?}iNk>jz}B#GY+;93eQ*L`W28 zf}4BDM{obDnp3qnhC=EcBNz<9*uhxK`osBHz#lGzl#8=h5nM&h~KY z+en>*1OM=DKl8KO&mFT}2E9q90P6o4k0|dx?3%m*~j7x#H$(D%* z7GPReT6A>erJztQ^5Lf3^N)RWQFA&GP4AM~kTn*mWXZ8_0@wSSN4lLELko$oIWc0= z3LKeJn--k%$$iT9bNkjdL?XrenY@8;d4^o^EX5}xQ!;;5efLc^ zmGRij@ytF#KS=NAb$_vnDvnzMD(nKug(`(}0Ia^Xm3BOeYauR|{hw;q%pFmgjN=AnvC znmLqh^_GSPF33rk)xi?sTUS~m9y+a}#DnfDEqSn?Y^0i_8Jt1%0ZT1Jpx)<~6O~3zp}Akm4H_uBA$`z8p0GEqe8 zLs|bJ$X&()JODZDQZ|?&M!^&~zrXeW^?z;(redK+ym6Ip?LeQF28sw&f&Xtq!~zcH z4>!eTA?c-nVEo)9?+ugf{D(`P*Io?ih@N;?&G|-Qr z8G-k(2Xs&atqoY5qndu4X@rjwrCnr+Bo+^{TgCk45Moym*;r%^Nts zEoCldfM2BQQpu$ryAzqD66)|WyIJWnk#)T|lFE2e&aBFPu-l_v@O6{cN%>qRDD8thc6}7{NLR;Fs==cesv7KaJZd#)6em z#0KXFu%|TG^2xFh;IaeC5fy9zgnt&%BUAVu{xJ3Um+aNtpj`cP1m3ral4kyTcaa2) z_@Edz27VfPV~+jO#Xd@H3gV9&+?kf~-!P!4SnU~1EaMoAy_SP=7c?&$d5a&Bnj~d; z@mpp>lpWzF{Ov1nRpR?b{0_SnOpra3LV@v+vDqkDY0IVN%kwmS37W8$Ec^Zfy$wRy zcsBTdx;hi>P4AT!A7&zb4*r!gNgtE(hl?xBapbww6E&zK(P6l3y36zf zVdb`6z(|14F<|0*Q11jO(vVI7NJF|S$+{smC&qWDRKI4T19ENeLYUovPsYI!!s{+* zAo%5%(DWRctoeWzzRwZC)2)}iC-|kLt9)dj@?{P(=E7iUjy6NGskwek@xh_~uvRKg zs#BnqG;sg%7DHR^HFj&(3voXJ?1~7hiW?i7WG789>n*f#ZP)27ZH4 z_$%sL2spz}U~l%TKmN2DeF4_+KkIm~w*2JIH!ub3tZDih19Pyj^%`uI#w+HzAu%>3 zY+J%BrH07tRAGb@)fUuF`UD|HO8(BHQf3=tB59pAqF?KaJQX7M?Tvd=qve$)x5_5%*7s(M@^Ki4{Iia$$T@wpMj%Db=I55u&R&g~g`EOa{nj1kF#A zS}GBrcI}{{#N8JM>G1ATT2G6pi#@hzb6hY)jrVao8w~(h`NdsLe@5ARgnh8v53s|A z`JM4KqYJjTH=Z^i0P+Jl?7CNo_2{3X2<42;5$1eag|ZRGB5VJ9kv%>55>YJBi^4-? 
zXc|=90ynvdyvT`4Q`tz89up(kbxjO3xM>v>IZ(y5mQ@Agd+Z6j(2X`A`P1PQYtOfgqB+;;7SS?G5EA2VG4XdYDn8} z*Pp4-0#-GWkoIsl66|^xudIl@-1dADw&A@+~+U&iG8lFn_+lHJ=*O5G3kFk3wfIgcdQR{w{T9>2yj8cI&C#$I} z0{*G{y4L2ygIJ~)Ao3Hty<%3oAobA)IDLX|;sB-c<7uF!fE2p{xMWeu{Xe$8F2A6Wdt;eQua+IoE+xY;oX(cf-yM32iGaVh^_Ka7!XAhp47270bm1K zz2GCs4(Ok1c@LNVb)w~M2}a!>2nycn-_>eHJ3##(T2_KSpk@yA!@*jAbV})oiPKN) z_bsU7wBT3w?{^MvK+f=?RO`eg&;Gvq{)gSQ3ncR_kSTby6g$4A-OtaLdgByqR197# zp~kHeKuyUA@Cn>d8PnN}}SJb?^rz$eDuox;o;;l6R8{(#O1O(@We5a*WJlE!|2 zZx<|#sg2de_OS`~*Ea3q006u0#Y_=(`&pIsw)aov1Hz}ScXtMO{u_0CPvJ%)Ya2ltt?2o|U_ouAO*KODzvida1 zjFw~?%?Zx=FN1BUJPy9qu@X#t6XGQzqpyvd9Raw^9%$QQda4(S8s&a+w0NrlYqJn? z)*Z$DRj6?`JJW+j@KEv$Xz+p~U(R>c+*%p^Fb^!+JqfH5TB=O7xycpcfAwYz`G)dF z49TD`!|jbk;8nM_(wkfIY2ROK{W?Uq2v!g3R_7~wA+`Jdj6YiJ1#Yq0oD(r?WM|@3MqgVuT@lCsBov~kDZY4 ziO$(1!A(X8EhfPNZYUc(YMY%cG5Qgi12*Ij3_p)TX1oQ`HBoe}8a>LtY_1L3HX1D<(P@n=Vd7D^RI!02Ss{V3GQ77t;}oL1(P0Yv>(tHxBU-z;r0y|O zimm9ZVs4(ltGD=I+;4U^#mW&_oiBi3_5w0?sZg~qpn=m78|eX(`z@ON+q|Fs-5y(E ztane%EnR!12vfbVy$KI+ChMnK*U8Wh{|{kr0uFWiz7LNjA*Dpg64fAElzmHD)L4>z zjZv2D$<`1iZBL5I*kxa`k1Qj*@|0xFHkg>6Y%!R!P4@R1^?ZNd-|@ch|361ZM=Its z_jaAvd7bBV?~>b7DzeoH;0X|I%18hNOnSoTOS>2qwI%7Y0%It9qO^r^xEi;kw6dKL zi8JAnT{{`GL-fI@vP+O2{5yignYtR8P8Y;M=v{5G`=V5`uF-Fx$l5J~C?6V3;WxFy zD2z_={5a%f*TmBNP=(xQ+q$GaY3~u!J%D-EX?94>OvdBEHJuUiv^N?5;BR4iGQKSs znl^ifYFHM`Iotf$8NclD4D*LBgU}9n!+6RibVvY;D?z+^Of9fk33;sB5nBeC$nEwjCn*HYap-B#IaF zxdbk}0PDi^(rQ<;1UI`Nj)ws;a0zm0mD7c(93+qBC4V)t_lN7RH3UKxUBpivL;E;g)S- z7^GqzGZN~8tvC^lSqs5-F*+BP03XFr@aM`^pl6zwSKZs?(>nY`fJo>aZAkXrND}# zhvY|Ue-E+aNBCcS`4t!a*D*yZ)z2EZM62j@8eP6b=Y!P9b%UQH>2>JpjbOEI%Pm-f z_oB97VNe>GS2XZGn1GBk1r@!4!b!r<^}#^pz8fUOnMbiJCs47DF820EHZXNzTflCx zGGbE19@+6Bu)F_$^#P_Y*E^Nxw0l9UcZ}ec>(HtAw5VJ5d-mHpWuJT^1z6I|dTs=k4J6($h6R%J2tlV*V{{$>G2Qs^2uy3qH6{GBK<35?Ijs=X>ZTGtKuT#Ouwvo8 zeb`1+H=;qD+AtZ(s1=H6BY5 z%9s<@C@Z})<9ObsuWK#l6^VBk5M~`nMtDZZ^qKF1$~+5ssByMq)#@$)Td<;i`4lPN zveOclVffJp_LrbI-}q2?`>}Ry%{{CKh&|N_hhJ}sT9n?D8*%x27}eR 
zgBfRnZfpW#4U%8r)}B0h;$n*J1M3*1%dnXscZ!OxF~A|uoPza~N!Q+4ygnzVwzz)_ zZMW*tpSrV9^Ym#5Gv^i*7-D>w0V5DI%9-_e}+Q0hSn!tl3UcH|l0_++FR=8)bdW z)z*8;ty%)gHY51EnK+R}jMGbFiKY@puj9YuL?94_df+R_{QP{|E|T6-JAH&TEL~zJ zOcJ%9|4vumIq~qLgM{S62NBqav#@s2@BM|Oz8CL|4SC?;m8#-n8z*DezqptjOgniz zO;SXzqD+Kz5ijdD|F_1$@v$Mu;vA3vop!hPT-l+^=yfkusI5@OF z4P9TZo{q#zJTM+6{`6VK?1JOvGgOyI3gT(jwyk%e)Xp4sW_6YvyScDc>$BmnmdACa z03|0^z88Uw$t6;vvJw&wp-}t`oeQ^|XUz*tVPC={cdW<7c5Z@q1S+R}U}prI32yft zavVe(Yetmd8Uq4d_Y=T78d-hswcG9vr?nbq$AW{r#176qvUucFT3l`ZPKy1&`TJUv zZ>*Y_zY}j=o$sL13&6-5UAF)fpy!`+ccnUt%pz=*@_2uwb4uGaFGR63#^^-x{%F!% zx0{};*&57!vW0oT+CLR|YVMg;mE-8dA_S{F9PudwS* zm(=aD%K;DWEj^$0ChLxHtSt3vkWN1Vtpu!}$6ldoY_|NNv-lU>T?X7DeT?63=fz+x z`%NRP)|RjK)9Pv75A36k!+#ajf%Q!|MaM1{4@lHQUko;jcWP_U%^v95J__p4`kYyN zPY-G(h>I~+j+k$zo6n-$Vf5tgEsRToBdTJ^>o~_bmCHdMJL#ph632fH(%2U+lYi&C zRg9w;FDXaLYc{CJZNAFMDK9AzM$HGOauKf!N8%iw8RqsyA%Pf2xJyHI6hs)VgVo;q zg~v39cGC-?67DvKmSnMyD-46Dwz=kegUcW0G$XHDRvzsD{gYl?oKAL%M(I7_=cCH( zO(o8jv%;)k0|g>`RC(&8Y^`jxj)?d|N#Q_mw6A3Ixq6u*WOL?c`e6Ymo{w@L0URZ1 z`8-RPAYY}&Uv#Y&5PMpUTm$IENAzMgw&gRpG;hd{(PbeJ>0k?h9jxoyw+x8f$h>U= zOJ>ccz(VTNg8Cpq8uXVf8nKR0X8YtV0P71XS(dTcP=yDT4T%6sdI$2STe6gERl=8? 
zzF++Osj}US=cDNUXdR#i^>I48Sj6Weo;Eq+%a z`Yb9JQ%+bPOqP;#;W0eUQ5jMY)Al{!CGrf}Q%qBd55?73B*4k@q@{4n{jfH#Xv@xt zH|xqOZNA@ryz;Y)M)9=F3V$Ll>Ce0QJ{bf7#%dH~Y6;W1(I#X|Jz{Qw_w6M!V z5=Fg8t_lg`LKl)-5EGk$8dYSI6Ht3UkLWQ#mu}lVzK0F`JxCvPJBB{|K;enHxHfFmJR6^k8}kIK1-6Y73&kp41|>(I<=I zcGBFK3mDE?I)mSExOGl`%$Q!rqqsNcZp#_q&G_u&nKMcm#Ey9lDj$-sZ|4gYc($s8 z7PwFMz=5xINHi^pQVrJ6srZf5SKFRg`qHqFl3)#6tcTgZ_Bt1Scdf{;s;cs}-1ZX+ zD)%3c+N|7~JtC;Nxz*eDR9+6(=F8cHRMt=ula@l3jaHr$>y~W`6$~0N$~X_pBZVQb zRfiCxO7rNiEX@9U7uldV2*1%wLCv43NiL^vqQ;5LKQqCrYix`{~d!HViKD zM`U;RHO+50gz}#RfjcK3+4Vz%kzw@Wx}xLzij{-Ww1qJL4VJ*_&(g*~kx z2=F=TY!sEIxhU);}W?9+eiIke;~vR*`5)vZ(|r;mt(;-#?Aer_@qx z*FXsL#?vynWp}sHna-D9G8m#LYZS?;-}f-dc0+*@P!t#;rWPstCB^YevuWn%OV0CZ zk+|47JAJmTkirmyv2@xIjm0ivMF3T^&u+7I>uix3SMP~99DXisOK8iROI}Iu8X%}( zrOx+7dkg16hTA(Q#Z(<|$nf{sM6_sgrn4~FZ-LBTO0C3T1O{IxklcdpAiy>by#Vj) z`^|jEE8^`iVU>%W zx&%(fPmiAAR<+|YuQ>DET`CEpspS)Kldc67i>gv_I7dRVdT2r4n92<<_qG5ZweTps z)Pfb|GB!oVm1O;ZL3a%yf>%%3}Leem7SD!VL(#9gWOn8g^)1>djd~AfS)Jkjd;-DWT)z9GC`&wZ zrk0*0gE&UtzZJeppiN9_XZl>_g5`#gz5H+cf@3@vHk`i($d!ex?f6F@-B~){Iz%IV zaCc@T){`6B2$LcVXGwF(U0s5D)ID#Sy{Q{J-~M1&taX!Xt6HqS99z*Fq#hX;mN2-$ z)|B1PT=(I6F%E@AC>klAMAt&tgOgG#=~v(Mbd&xMbuK6-0Pa= zVeG$d$K#Y}w~-w19B6?*_@j}PJ{k7*_E(qtZpagS&o-{#*_@vXXPAH^rEqO? 
zrgJcDp#_&r;d#>f%oH!f89f;x=|y^#8oN1dceR?_mfIQmDu{e9?Axwgx~5E01+;km z#*(!msS_uPCaYH_xi08Z)H>!V4up2=6phqd7|f3Qb3b__%c}dC8j%+G`wV4um<`S$ z8F(my(w`HJ_bx8sxEt?7koOsKLqOi4B}_AO5R1Hi!4T>ES2dAt*KYD96&0_^SKn(L zrf~)hH4#L4{Yh5(t2?)Zt2~@K@eb;B4xIsA@#d;-Z!Gk<_VgcnhLmX}YowS-uJD;m zAqF~D>jL0-RN%rLM~8HXayUq`4J^;gpfgwM@m=aD;)_w``R^)Bh{pUstrr?aLDmcW zgjKCVL=8kOn-gHMS*dibyJ(mNrfxC~eA37~qr8vxJfHK-3RAvN4quS3osg={Uo zMI3f5l^bbp&wV0SI5AWxB7RL_^rXfxLt&(g+p2l<0C@Meb;lvCTN?0q$N%T?h!j_c9N!P#FJsqbxY+~Xrf?6C*|^zA5SMTLm!1g2)Nt2=3B(81;@hy9tbr5a}R^yWx6MUh<9Zi z^k*LK+mVGl*=TZ5wvQ$E1lAr5+eUfF&INg;T$v6DfQ>OaBJG`>c(+nm;v|MY3;pRK z&`Mu;v7H`(vXE||4&DYgs)WZG$BU5YCPGh^AD1FKlHo|Ag| z=q_Siul|$9C*ssjFA}75!6M)-07lJ9$$U8_r8vUT5-G`|y(RnFLMThsfC;w+Uqyjv z%UByC^!5l`&^M>nQ!ZUo6hb`B)RJbJ&)*e4#*kUVb9a!OMK}r-_Pc;dtq*(rze#Vy zROkv|&GLH$WbND9=4T%DiGH{1bn*A>mwRhN^c$}(R`0x?Lt~{k%4lqG;fJ^VrreQy z*SyFK`kO3~8a7BEQk@!wuj54a7Cu-hArodDQAp9B4pS<9U4j7s-bH73Ake#}&962h zclm~P5{`Z(9R6r_VBixFYI}40Tz>{hXAXY(MPB%CzSV-^_ufK>$@4|{K72!|2C`ya z9Zz+a-hW)NtOi6M1${csc0cb@KFE%5iMV)spQQQw3j#&2shL_MQx@T9Ii;GqJ|;%= zC=`AD_Z`lrE6~+~z3HaqS&*fG`}{pl3Z2jD(C|)H(^9x5wW#}HqxZ{5au00((Qx2R;1y5rP65qQ6^$HCdJPq^_u0^jZhTJj77Mg{+-J(cQw%Jo>7*FTg4-5BlsyMpLbNfhaF59|VO0D#tn zgoN@T1R)2+j?LbDyc|uk`pmL=C1mD(`Xp8yYghZsF`0dc#_uroj{`MU>rL|R*o3aW z5+XLAR-FEfmewn2uyed?4`i+>%g3vrcC(25JstxYAf3!M0()dZF}v(v;>P3~^3X>J z%9e@kqJ4}Xo+k`z-iU2L@p=nfET;Ikt=h2xAM`LC#ORlp5hdd4GHk+1d`3m06;?)N zRTV*>uUFYe?P$XjWNoaR;&m7Y?=z$vgOH#`Hx9vzkOo#}sRP6ci8C^oKE|Qh<#48t z7D?_Un*58@lyNU7S`xjD1j{;6?nToK-WHEzMWA~uVomVSIwy+zij?Tvk-%i5d6HIx z!(CyE{Nrc|jw`0{gRy@;PZTsC5@`PX<;$0x_I{;4Mxf8&Z@_7MG5C<^<$y+VG>x2V zx#{*#burLtD$}|w=%i8)7ta%Hj7ognGwxByD@Dt5)(t$&Z-0~xx>X?eT@$!q$aRC< zhOVjd%&9Sik4n--F%BMJ9?WMPZefT#Htf^6XLe9-x&Uxx$>~3*hI9_Br|_e2czFd* zg|wD}`CUOx4SEyhtiI7c`XQQaKFM;!2}Twn+cp!j|Ep5jx>6|@Gs68sanhX%D?mY1 z84;GkN&F?U0b?5>nfc`-@u%O`TH@cy9=cvt%Uzg~7HiMjx%Y01)`C~bX-_aQ_c z;5|z(%J1kVctFntKxLSeixlh$(KlXBO97qBN+9)dUP`ju6y1mJK5xh+!1%Cl=@tWv zL|ohuD^ON(F4853ZOm0MrxNT>%gNnt5IuzUJb^kYD>o70emBM_FO=)dUgTO?A@XL* 
zMunSG9VdV8o-l{2xU7<>(cOHnUL<44(vfkoNp4+U4O%6>8>N%+MOj_={4lm3wCV=p zE%LLaSub+$%EBWbNp=C%#ek)Qo9{^XWau!TcbiIp#q}3ZlXTNkF>LNj%|=CLAVY)M zhgFtiF2O4zR}?mehDhKx{I4c3^a^g{G6epx$&-d|zv6 z)S8r}O5~)-=@8g`40(|MTd-+1CyqAN8*_KNV8+k3j+=`(nht+0o>TG zR-F$z9zB-0c*)ybB>~m81HY?Fy&$bVHnMw39Q=a_gSXmxJud zWg}1Cj|J7#t#8^^MkrtII2Im-^&P3sIeHg$KvwG(QJ)~!5L-%exc5WrxjWgDGoaR% zl(^RBA?~spozKwLabj3;tyOKZmcq;2@@8T(&Q8uHZ^u2<2(Rzewg1BMU)I2725~|y zC}V(|A2VCVBc(0AmMHkj+kS&1@Yx|>O{{df>@h}^Ee^pBK?y?p`wfhzPur0d{ z-Xt#?DGbwpQ{x%MpWwt6mYsG7zaY=BJhQ~Ne7la&UXX?+>BeyMzcblbDayThmf2A}#0$uv=?!vNFKdUx9qtv4c*vRFy zb<=GQX05K11)3a9^1r;|(G3ZGR7?sGC;MSSty3wYc1wY$aKmjmwAyUERvX8#`&W`* z9OKvFhEs6Z$|efiiW1!uv>vmGz9D-FX?0n8l1rl;i%CKLuN}wZrl7(&qa5TxS!>0= zniN@+XXBI92yL?Q@www7t_4}Z$D?r^ zO~V1EpAdRXtrc4ER27p>QiKpC8u7yZbt%k)K+UH)k#yx+gLcgGMIaFeo0H4_(s$XM zyHi(HMJFu{k869SBiCrMN4pB|UBTiwm2Jjd`jWi388!!Y^=0aEvWSieCRKV@%awY=;9aiFh~H2t|m<$o9~X8T{}P z=h9L#ggs#<8h+F=R$*TYPOBtZUG5SuWO`X+7^<>vpSYA{6}fFr$y8p-ZKc}#Wi6o; zZ|x$!uNlc4?|_|CWQ@JQ&wyzB!TvDgXb9-je+X_okN=yLANrayc--_p}IWm zI~0=iV~E~~MuhvJ4VIfu{tX_`t4Ol0T?&da!iLdEmU1hUa0ilwBT24bFpIyxgyEx> z-jACvLx&wDx#OAHiGp){-0a}Mok$%`+Z^T}Yz`6NVSf42 zR96Tbwl`09w%b;xHbi_dQrqi;(Nl|X=ZUbTIC=4FJ+BqI@YzU`gQ&keifK`t(0DZ{ z@a(`&Yl!<(TL&r79|v>JL3H<@jbvGsiF^`KL`cb1xAVE=I|!NR^NdSmU!V2WidwHoa(S> z8*`tzCfj>OHzVtqwVrS8zV7+xw~ExTIT+NwrBaxbxXs}GbPkd3rF|TUY5hYe%MN=r z{EVO0k=(HUgmiSmF+9}pAE|n31p4B_XdT@ah7DD8fy^&?=kDOq-)W?w*7#ZrN|-hn3aiVsfoO<~oNB zOze`x5n0d{*KGt`j=#TlGTGA0^WZl9{N5W)?iMCx`1K`9V)aXt%iC zv978WW>mdXcPkd8nJ$vATq`7+y~oz-f$y%e0nO&B`PQ`ER1kSBB@5Z;)h{>t7j$YP z+t*WNcC8+K@;>r9(euK)vb8q;GH4+<`bBE+YeT|@jP#E2nl0vv!m-oMD6YTeM>yZz z%nQTj?JeOljAq^x6mwCz_F=%)ubAjA%C?)7b?JTeV{p|JwfWIxEO-7#v9`|1gf8rY z!Wc;5BVri_rr*gzqBJwmDSvWwJt1}FP%t%!Oo^ZMdQ+SfyQuUw1P1`KQTf!ydoTtr zTK0E89iDLy(2HGiJ@j0v4m2*9?p zvG^NEW^{p@JRiLg7-oJfaam4J+#FTq=>BNwnCASkp@z_g!@-M&&?Jq9_tk_WH1oKX z4hL)4n6nEe41qgaS>Jw{RaS0Bqm#l~?@WvswE)8>bnE+g^>DNIoPQMbh|C5O57iCX zX?|Zm_Rs-~l~297##3aHT>)pOx9Z{I!=XvvOW-!=p3VkcSW3nq+<+C^>VR 
zyQokgH2pa@%>*2fMt!sikvU3OyNhjVRb zJo8=cW0sy#Or>@MDQz5M?nzndkUtK4N=A;RP!@Hje1v_cMn4A~EsXov|U?l#Y@OPDtkj*gzK8XVhk#*}!{TpQ-0W52R z&X~QS3hxUPCHEZxxybC;J|=`>0-Kc6Xy)KDV|{Ad zh=P{R{@ZOM9~>6c4PcI#n4bT(in=&vyML`S$~#Wi-uD`Y)dpb{kDxt0FTr zjcl0wG=7Wkj?yecDJlaJDfd);z1>YWzmoaumJ^hma4(-Lb%fYd=2le^s;iB&Mb^ry zi9oRt9aHA+Gm_@+B>V3iZO=Z`aI~#pRJo(N@)`{HPAfX;h|c(Y3Sm9$x|-!5hncnQ z$3Jy%SYi=B@Wh1G;;#LtZ+qObpYcF_D4v}$C-ICT+J!MZvdlN>gj(kQ3#iTjV$A6u zcNbP$Qq}QWzclN`0)``|uak?e9M1}sbXZG<#$p})Zf%s{*)r4ln*Hp0s1Wg$kz^aR z1CFVbZO6pYOY8JV4M2<+qaAAZ15@BUjXhCn|}5dNtXe+p0Vn2fcX zHP`l+9=ScbyKa5Hb#<7$G3=WF97K!7{w~E9##l%FJE`5aK)!)fU$vm&4^=l>KIR2v zfu-Gofb6q^=;*Jr^&w#Or~4(&0SHI6_#F4TifFtMj|dGA?ryuhCoKE?-8t+v#Tz&3 zt^4QezgvK1ta>t~jGb|6aq1X*Savg91Ld|kktFn-yLd{u>AUHMv!vOMq44W-V%t8T zPfVD~%o{6i!?ii>6t$q2G05HV%nyKnl!(JkLY+M5-(VI`sdi~;v1h8Lr|+lZR5kDW zT{VXaQWeRGXx*~2(w=v^sB)xxeZ3Z<-OYkcSLg!jrJum=6?}B?W5NkXr*aaZPRbI^ zNxUSsb0?j_`ui@-Z#GvJ$S+d0CV5)Atkea{oGxb~0EA?Xc8ib8p zQQ|t)b!+pPB*#T-_QTdqI$ZvGyOGcAp|6+~io<@EQmwz|QoawD zHsq9zx7JWwBOPrv1zKWVgeL0eHmQRnKR*PG;2I2Y9DA8tR>+u1bKlVqnsKAg-IZLv z>Ao&x%4|(MiLVJ+DVeAuC4--gR~E%etu2dIzJeD+AtD}{rkbV<@ z517VBB{N2=B9Pa(9gN~gC{u+IA-Lk=zNmRwYz>NR+wz&gL|GJ$`CgDGVLkFwq z@}?qIXc#KV{4{fDS49Pt+1hOBdfy8c>k_2Qip1LC8cV%f=KWbBfga^E9RGxrYW;A3 z&yXjFNi*MCux~E4HDz7hv9Z>Y^kR5J^0kd^3c^szzxcWY<9Wkdp*6{O@O_&W*Ee4s zGCQKqliw>fH6|xUtS|FBU>9y0NXS|?o~=hQP58Ons@ZLpF8&(TrI84|?xLMrR??V` zpcDvi3e(2GeEewwk8V?m%+$#~HEq8L{Tcz~R#S7KGTH41_ z>apv?5mx9TtH!vM)DD(ZM~s>b^C8s3(ev8c53W5nrMy2i?zCs3-WbN$DP%tFE< zJ;XZQ#Phg?H)?@4+`8ngXT1**f1d771xOVy_cl_P^@BvNTYOs2@n*#~y3z8&$-hN)@4n zitbMeyi%n4w78S&#*P;z4?gU!Y=G;_H2k$X)qVc@`d!gputa{VYGskuD;sWi9Monb z&JvxbVu(s~Df&sez9A9y1u5{UVuVFNO>%c#*Mby%nv_i-x!h855r=}d%}c0QoG$1Q5=n3dMZl|Kb1PBq1{Yr;knGiT#Ap``tj%Q zh?^}u&bnp{$?Vlo$JCUiryBKo_F(~3c`zm8a_ii@G_+KQ#+PAOO& zOT+#aIf)mG2qr3x-mwy14rDX83Ouw4mVk(N|IbAV1Z-ekF_ciCT~9uE0$BhBhY5d= zZ>jIwEoZHmB=uan)kDX}%fEsFEvfQ`ng(NMx_9N6` zYa}sV$NlwLW5Fw&n_oE>`5VB3x{|tu;bN36zooJg0@d-0VMQIx^SjqBg=7t%z6KKo 
zW>(?IV-+Op8rPi&bnu@e)f(s)&_@DcI3?9(_$1@F{*?jGx4Cm%U+U=lGtbS~Lm2N& z7>q-4f4_{3zi-)qjgEE_6-O_HtT%_+Y;BUaf`S*4$i2{R`2tEuvcgx4PybucMZylf z0MmnIU%Ukb)%^8>Xmf%~HyPaR|H->)I=))OWTx8MAmr6={3B3!?Xp@CpX>~Qsaf{? z6OhC%pZU#9xbFin1|ZWY@9F!e>pIy-NrdM7Yj58i%DJEgn}f|E!EsQBSzGZ?y4u&c zIxlYE?$tY!-h4gQPxM+34L$d^GRh(0+4}d3i#4I^HLC_GD+Y`B)J-CC2Zsx}K6tfp z=jk+Pt8OUTP~#g?>3$1~>HMy!7e>^H;wYthY{WC?OD&8Hcxg@qcM?1xfeEZj*|aI> zd#I|2EgByWvNeNkrgAZP(XFEF`%dwY^xWy@{QMAK$51LE)wQ5ks-<4;konSe(<<7D z(9qom55i?WgmdTpgxp8M=bYD*x6q&p=g#myOgd}NE{wmw|5y=|UI7*iVyDs8neKZV z>?|pBu1tepE$AV^MOf|n?Mw+(aKZo#0UZ=@yD9t@IVPSaETSez8F27zFwC-F{3f@l*@!xa;lJeqCg zB;h;2%NyRW7k<)fo{CAChxrgs1%3y)k_XC>a`XKRgJuk(jZY(#^eXPwg3CXj$#QW1 z+h9JETb|SCBW_yuhsK8?>?l)q`beO%MR4EpJnjz2IssI5`{I3~E$0xuj$*!@X( z&;g0C;H1|5uvJL4V4r{Z>MT6^1?OY%dxfowczty3MMp{2c#>qEc9~(!&Zi%U{Oq=z z-LE5kuG#Emo)5NzcIXSWi**{TjLn`sRTudcAdT@yBva@-PTv@2Ewe-3`!aF4u zJw#|eXXbaS7|MP0dUmXW;pE*-DV)IG6n35sVIxXRY(^OhI{9;U|SAbr|$#CopHjV{22u~q@vLdJo33Wo04&&m# zI($0OPJ)gZC(Y049O&=AWP)c z0)e4qDW2>xU{%Nz`z2`jOM_-c*^3OJ^DY@!p^w=`wByQhJSy!aC@czUm^hlfeVgrPHgnXA1=wFB*-1|N0?Za4rD;5jrjCV07fv4cc?8u)8K^h?lH zcsO`U_IjRNKWE+Xovl+d2B^e>wromLn!(E3QUET!Arfqi@xl#&+kOP50eoIOok>)5 z;=qs)2yGeuM(ROfPFv1c&rr81Lc(LOa{8IVU62}rVw6N3rd&S#aaEVPrORF)WAaIS z`;;opFt{*(&Tg|d)ohP);6kqF7ynGEw;rOQM#sdA_hHr6llbvL%XLVJIhx=;0{GWv(`{&pZCi}71z^er0-Ox?c5r=mwEUEfeixdtfT^96NS_pISgAd zvAT*3!I|bivV-_>h_54*{6(oUb{PCtmYhnlB zi{_y-cz>y~X9F(RY~b{HXPkZl4BtQMo5DM^H9{L8^t5+$>#*Cdugx9W+`ch|R_laWlWU$nT!y;*@xMPZ zbBUDhtiRgsZ%Le8db0lU==!NT7bo9l@4~>k%bfunA|&1ZJGrWQq`NV>bysv-OP*Z| za=$uPSG?Y3zVW#lR&svbm7aG853+d1?GCrtSy+CyQHQ{0@W3U{spnpG+;?{*MbXW8 z)tKm7QU5loeWWvxx(o5)fs1LENA_cv0>kyQuScDG6dO}%Ja#%mQzn{LroV#H-%;-e za*1vfZKQg%u$Ajj2qNn0whqC=zt^4bWUTFB^H~STr0eNtUb`>dFDD=g{_uzpKk!wPhny4sd}^VS{^~C-Dv1o_3CR^Ppnu?`xl%~ zzPkAfzeb~NB)eJ0ZjCSR{iS^lVngRo#YJxb)Kzbj^YX+&4co$XPv=AcRjkINGG=ZU z)Vd*)b(QJu)?w^0ya@Y!-l=Xe!2VZIlR5HZ55=po$im)TGV&0D<i)2&mgJMFWamTJY7Q*v6wP9|gsAsJXG*ov9kP7l|Q 
zSJA4c;`Jzq>pzKVoqf^2;>xA4q4NnizO106mChn}vviaV{D9JwG8nR3R6p~PZ^Aw1 zgXKS-NA({bB+;!yPaK`~px;~Mp_qG%i5^vn!Am#|Ap{J!xmD@2fq4M~I5y5z)LK5a zrOr~8QW_En-{Kk2yni<2{RQ=BXEX^ff0)ECyH$Exvwu~r|9s_+c&sM1VCB5eU|`=f z(Z{mGBqyf%ii55w?CQu!vrI^OJPTTKJ88;kme#be_1A-x!t*-WMN*ZY4hdgoM#3Td zJ6pO{$uG~ccGQ3yfU`Z+^E7P$ezHqr{pO9U@XS++^qHF6 zh+JFvaAS3^c}hXfTyw4K zj#_$|J%4XJeLoXi6(!I-uWs<$j0SNML^W`?UxfD1i5!UYKtF?y3~r4o>Mxo+`#Qd3 z2;b+a@kmM+_H?H7B~a7-?nfF4#=Fws-L7I>57QwVyXZA3%K=)!f0h2C;&7Ss52w2H zpx2URvH~4S29yxU$NFkqArlE#)qs zok2d@*UOEN7gDZjGPPWLJ%Kb=H&=QcFnB#VU7?QS~<6x{y?akx-m+oU_A6G@t{fve=`7dI!2e^ z0Wbk|K3~x>*~+rFcZ|Jrh#}C$VMw)+dA5wQbgdugT^Qn9SCup%Vz}%G8yp<0?_=e_ z#Xl$N@_nCdOOUI+vr?M#>J{AfgEsyVQdQ4#wNs0FvUVrK{{JqcBc3LIT@v0MzAWkC z7S{C;fC44Ptdss>;mmeBY)v~^16Y4$VKcnay5;0O;=Od*XUYH!^OV#UtOG~V?rP0e zlXEwzex{h>jPLH{8U75R{u7yegTA^99nyd{1!!9~sAv75 zkZ?^8+8aDTpr&Rv)WHG=^)mgl7VWR~_P(cgJP-jN2bpGGOV@6qYyPN*;Bnn$Rsr{^ zsSOtrud)-P^EyfX#iw2k?NU&v|9pwGtI)~)-Y05cnJrgIgXw1Qz^vJ=wi(Psk&L*0 z$ag>Y$=u9kV~_TAB{`Qy%_N8C0im}gqJ#goU5>y1>TH#e&WCR*DLU;@>dwEu`iy!h zYTFiv_Ui50i3*pIuHe{abCA+Zi3alHTRwLGw7hLOuzmeaAI0y2VHRX09bbgB6~B8~ znt4`j)&7lW_J;ZuxX^K;%!|6h-$wvfyRU!A4?VUjo%W6S3t14uUGxb#1o z9RGUj>8(Q(R-?M3HlYY)BH_n8_2m7v!1dZ-V1B=hI?}P316_o8SyBL$o!h?DOYHEG z5f4pxdu!a7|1W#_Sn6)iuzxHIlHZc0trHrVyMmIbRv-4+h4a`wbrgDS3OoKuL;X*o zg7eOq@-H%eOobh&zg=K{C+FIk+Y%<@RpAqwN}9c=o5xj7y5)J7t)kxPmt zM4!FPw}$&P_B46Aaj}%n=W{whdL}Bz{p7v;k#y=(mYAa1Xn%#ag;=PnD4&{EDC#=HD0{gvPT(~0(SQPZxtI{EMyj1Uca1N z*yRKlbWFrUh>_5=7+HL+7uHu^GH@cWg5D&JU+lI9Z*`dQ?1Q08n=FU5eHEkpNE2e+ zP7w=BZ^uX{|NCA>+%@~Huy49QAI@s3^1CcAlqGJg7wWWkUulNvZVw4DgOk!^{j4Ft z(h)cY^CFj?*7lp#0kjw>S(x=Z-IO1kt$0)Lx2$YnhQj3VugQ!)6ooiVOlRti=`v$L z6NAqYWqwK76Z`cc`q)B9=Ch*;xi3!!!xrs=fC6)t3;$RpWRoi-M5gBn z@Xdy^4-(%QLu)WV0B1pG`e#ogYinpDYOW@&?+o&L=AO~&a!Z05tB zx_IH%v~X@j5_UqgO~eFh!zPAJ%VQVb<0kuk{Rw>Q z&j{_nNYTM#)~NJf4>ITwifS3*B@aubVy2pb--GzC=?R7w`}uLkrHY}irPI7p9Oq{H zJ!6EV3f9qyO2-QeOM*3-; zuW4n_Iv5AvIB#TP9MR6gyhAun?tJ}u&0yvd1 zWh142z`o!3=gBUON=Q1MkFY2*{pNb;3kN}p22G5Iwf&pZWyB*Yk+>1Uvw{4O^8D$G 
z1wcrx&YFi<+impVrw55Cz!twyqT~Upt<2gGj+vBjJ^fk0mE>-824F3F>WRYXFFKvE zfaB;NS8|uHkLFnZa!fWe?&}g=)P-c-|FP4y(*7CIjr7Ch&CG2et*#C!D_=}57u2eR zumBX59_}pG@ZVqqPlrwdu0tY@&B~JnLWdCM*TqGRZR%V5#!zu)L_L(!o*V?PUr>#eBN+ltdqwcUKx{T+*LAcuRq38e4#fqNdkhelb zDRNM6VC44YLOozVK)zZ@aw88ikE+^N{JDtGEif<;JWaB9SL>Spj=Q!oA+(tjGWjg@ zVl!Fw#M)odoj+lnv@(9jlzK3jy=xn>lazZBsxZr`s7p9|7nKHp=20~Z9Es&W-amM) zdGYCkzNN7NVH3K_lKvvZgDSlFwbT>N%!Td9)?W`QXGO@Py7z0Y+~1rC4mDQts-aKD zN;)25Go1=?(!fL#<-Rv0Iv`M#*yt*ru(1fd5_j$4OFBAhNj^f$eC`i?n5+MDh`Apr zrhvNx+)O&#B?>@9aD|z22SJzX_oa<=wL#W)|J!X;Wdkm01TVwdUq!2#etkkkl2DCX z($R(YpigjYn^N}b$=X^)M3dFJU---GZvVNkdEl%KvG3ixFu?Tm3qFBS(gi651zZy( zLJu=TYw4J>?)bx+(8FS&&v|LE>?{Y=45M%Av+YvAy!z@gBSQ2Z5G1hi>ENhy4H#57 zH#2~Lg460%3j^Z$tCBa${)1Bgr)izP=jbGNqz{r$SWXnPV*i>?EFbR)i$8Ew{t&$#(AFU1oPQzd^(jWh4tlrS&2@~Rs&DS3Du~}h^UZBB z7q*tko7RFFfhMVIUnaFR+!5A)kGIB&py1-lDjHbgwZRfUhiL#%0gvqxhJ{QRc%NEJ zgO1NX^?orCUcAfQ*>#e4Tb#db3Bmp}UO(&Id#;fej+x0OihgXIaF+%pgm;*NFP}bR zE326V(gMOC*PUSN<wA~=yhSJyA zBh)PbFMvs7CvQW@j}5+0e$~hd-E_(2wjoc$_^9m;tcu0G&}J6 zoRZhbjD=T7NQk?&7h)jM3M?yyzJgC@W%%=&g?7 zB?--yfnjASHV3}7$n2tq4Pw7j5gUVLPBIpvAJpz390C=zj>h!9V$)OQVr1d1VSp8f zru<$=jXJOsz|>ynVvY5D0N0qT4Ky!S;DbGS&+U-C+b?%SxL`>~Qh!g$9t$7}j73H4Ql%jqgEmkUz$*c}3D^K~ zyA&L9r}9OYyNrdn*E*~N{{?qsFGI6?(7KmjZ=QeF)v#QFe*vn1R;b0&LkomC3REV> zRZ`Zwh>++hKwS=?$rWEkz9m_P!ef7oV(?VY!DR%UFW}ed^?x|sI#VOYW{32!XvNz9TBu*|iqoXF{>CaJlut=Z+QArX(X~P=mhE~S_VMyWl2>Hl_PO-| z)bo>&TraLKbW&1+=04M}OKmKFvb9uO zLmn}33Q*yYnXoE9PCFQP{0de;IvwA%GSvf#%zm2EYg8KHBN7_`g3GA=0b> zu4)EH0jNXZCfX{S!%HQgb^1%@J)IbVEuzlz35qQ~>Tu+>p1$@h@OGmUGzb3gcddoa ztY%Dbc!O#tWwOODA%Pu{$(00xIAO>OdU~D&7@^Ce;7r$rcIh1;FXg2s|DIQDh@) z_+LA|2-q3Wt$V*T20a>c#ZH?#ubFdnU z`lDO<3hraCX43ugzh8Cd4WkLnvV92XlK_oq9SyJXK}QT6;z~$K;Wls57nq$5ffOh= z)Gh32%jvV<4whK`CfVpe;e!s)QNeZ)y1 zt>qJ=0zVo``7V(2Z0F1ya5wP4see=XcwwLp*9Dq?=00LZ1VbxN;Jcvl35cTL&rsKg z@BrfQs$#Y2Ba8^`k4a0t0Nrp6cZ}M_5~H$=%Cho0FXsVFg8FywIpz<1{~~54aMVTw zvclcWt}aHuuLPb7hR02Wi<+Qq3h1wLAF-qPTSdWo(E$yjU@oBlnJbj`V9CW^K;1wz 
za#66r;W+~C^96=0KSat!nyyS}SJwb>kirG$%nBjac>1-Y^cU9S7%Ss@??PeLz!5Jv z-(~fj_>RK_B zW8;I6osU9*1~QTPj4mvel9^)!pch;>eHK(#+EAECG0jCfNA0AkSpc&fE@LRZ>-oQf zNxB5W-fW}KaSiKqSsDC-j+pW{=GH<~eMV@(;Hd@f#f+7;>E@unlLr0P%&D1mqrBdI zU%*HREuA2s2sX=)ZfDzP;A{9p)uWuS^Lv?taR|7|76n70@ETGRO)FxH^W}WtW&nx0Dc+e`PPyTJh=dW2_6gYwwX6s$lJ@`GoMX!ZvEhli@*mB z&v=x1z)kJk7hq3I9C!lFjsQF^r~{oS|HEnZ)zR46EPsaSu3Nt2uKJHc{o0SJ%>9I+$ z6+L)IP40CM%xM5R9?$_`!y3Rr?5ws&kEF4G|2?t(5;maJ^XJcV2Iv{9zWQiNgDQAZ;K< zz#(*7+U4)EtV#7u$X<5)mla*q3CVBJmt9G8a?MaZ@4Z`htw0Zyt{Tw?EIo`ciy^*U%Vx^vFI=I{EFmfFro2UmT( zxapev{i;Cr#XO`fk^Fgw5eH6FTbJ50p`mxjJNtn=#`yOg)B%= zfb0nQJ8vmjK&CD9EQ9eOq1k88;~=|}W5upq58^4w48~=iQJoT+I#Vz>o1G-}3Hld0 zJi&0!Ods1-6i`K@zT~D#SbmL^F2?Q-c2%4zb2!`oNVepFw=neTD{vO^UG;108r1RoIa&%FJ1%Y>@T z4B%vx{~J)*GI^=rq&CXe12IF*!?>KTkKU=7-eMI5FGv-%aFAgY^_{z*Z2;6FwbOy% zA)Oqifze6GbkVCfRnksqXlrXvYsned!FQ@?``apgVu}ES{#}l{=hekzVmD;{1md8XcyTBZ_ zc1H(*7fio0FoUsH^h$7v!J66e182MaryY zT-NV2C$;^OwGi46;7EHG_?>*Ygz_&n}>HI-9s_57^*mwyzF zjz|zJ;O+$QrKX06C?Cv)EP%&krXTh&nHw6N-6rDH=&*gs7(@e*XOAWw+8d)Z-d%?7 z(?vS$eMpokh1T8yc%&N&95F4h7D%nQ08UWwS)K+v7V>evdp|{dB;Y0U&juleo9$i3 zy~m#*0=qPiTHf(U5N3#ZwO%mYXw{ODW~i6C^(eNxH=Kr>&v%{QZff(sVAkoMXoR`~ zM1%#J`hPJ*=m~tfVq0ce^J6;y9hoP^qDKEF%d?6!Ivi}p7d?pEyoDDKP8-;<=wd=z zB@XD!d-KQeBWx_6tv6G8zFra@(3<}Xzpxez4T~jjOUlS7G1kf-YB{{$@7jhi)#L>?WzU z2eQEJj`+@VT!G{6I<(w)e{X|!e(S4C${omKUE*H{TWXRz^+r?3%>Z%;!8kHqgdECJ z0^8f3@0T%OczL92I}(P$qGuSsjL@0nh>zV}<<`23&gcqD3+%osBc};!5M)k$rK)sG zBoA0R4+?^5z*5j}>F>(W+^19g(^tqP_SpPMdozQv0N>rGA8y|Q_i~flCln1|!09Op z&|5iO1%-N?vVe#`|9(39OGpS6SRRw7pMfoWxraKNME#1FpWWBQCDT0QbK{1*x|iGf zSRmlPA0*f>(2fRqN&a75Q=#*V?i4g)MUGk@9di!WP-aXLGx7UCs?gW90@GzJG&f0h zI{rI6IQ+8E`l6)AgNiX!74g^h1s-&z2wGuok7LW0tD3FGu~#+S^_E|CIe1OF z^?}dvjTyVMj+rz!Ob}nmJ&#T2~D@+887&59l6@_2-j{18F5hEz6cw+#dURIOW zv66-Cao)B^YLoV+Bhh3<-0>aXG<(=z6>q~Y+-NNIonRqh#3jODm!4S1A@pNQda&EA zS}SMqwD9Q&15>EWg=5BSGYZv*eL><;#&}*;{G+K#GPigXL&1mT$KdjqrQ;NWJ#(zH zB)@c!(8jY!HOYE%Y@8=RjlP;yu;aaiz%n_aE4jCGPEt~`e_>&v&XxbGA?--3=B-E1 
zkj}So7|ZE*v4PS;o_CF#o2aZ$jHps+bWmA;5%N75gfqXj$zH7opj7oZt5LjUsM{;sc-oHHYP7G?_;q_a&q$M`Rni8Jsj5~n?VI2 z!OHYJ;p`Ic=Kj9E&BZDN{7zk{o@MI9fq}aycg_M{QhBU;nmBWB=sj|gcBdxJPVyji_{o_wBUb=LW41Hwa&-F|u zvxY1{15{|8$KL07d=4I8-_T(3*f-*5ITUQ^7#R`qWcVX|Lq<-Stjdiry=_=sTpjeZ zE6aMbAK!dlbEC@@6$+iA;ptdMZzrDDSJ+d{cru0Fh?QFgTu|>GEnI#HtmS#xls>g# zZbzEMG>3cR1}v7v8^J_u=@jsj$*(ID-fc*!Z=ticZhh2FS2b{|z6qG?dCtE3xA_~@ z_rw+Q1tktDAv4Zpts~B}{`45)p~2FV2GJd2KdP}Mr} z-e0RKNF!6p!t!Y=fU!VC1T|^+*{j5^P-6pgX3JeZ5q9YZ+S9SkJ>~`odi?3BJu+1< zgJ}o!^b+5_dzVkE%l?Axob5&^uA@TJivCy7(^ki$9$I_8e`#*cAQ2M(dh%#zdh)SW z%`acRsBL0HDT!PX%Njd>N+RJM(2_3hIQ`avZkRgFGqjhfoto26RRTDkGsL(1p@VgC zI);XYWP8VS%9d5JsJ@3UbSbYJf^=>JP#Es8W_=w#-P&{Q`adtjlpu6&Kb^5Q* zYkAfS)qb$~rm0H7M{>K^CvKbN<$EdsQT0UoX=Q&1(s7<=>{m!(gQFM@pKIXKu+E8? zpY{bnX)dEz)(u>Q#~Kc)TFso9qXm1v*VbDe$>&g01_lNy^M3ef-hvQJ#5sp_Pn&d4 z!10%bmrZT_<;8uHpUyqlYt_0T$;_`R*N#R^7YDZl|?J2uk;SxFPt}g)6Arb zA`-*tnVNX}HPNgsv7_vvHA|X2muB^dPMjn>*KZz}oGju!ussIjySMcR{53N?{H*4_ zp`l^TsR1xzQ8AonqjqNu>z5ktX({W)+#mwC!FWc(7lq^zp^z@uKT~Psf6ldXw_Y-b=2k2j84aNX zU`nTa@*}=Zd<2O!^+%P|LOpHUdR}(cT5MJ`3o=~Gv<>hevUof; zxg8rlJ#6zPd`daT2&-T;*I+$LOSVlA>M+?1`i;~JxtQSC*io@ZmG{vhvd;=uEYrW* zKU28Qv}HbD3Z&MI0iZd^fQOywM&~N-*EC7+ykf}}xF;iBTn6xyFxIH;rcImPx089S z*_M=OX?*1Gn-+@Bh#abwzwGFcTPd5_9#9tEwO?ZM7VSF+5!9()MeOp3vlHHC6AR_W- z*lf=_!yop@rb;;k$CD=N2+LQT)(&q{6S)_>C3dLGPapc{74R}I1fn^m7OC$e4cW%< zl_;L~snyY~Jc{ESp&6s)7*$wO81Dhg7sakM?+aWD^yK!k@h@T9xJZzmI8l)487BuR z#Uhk&EnMOZ%ntzrF{-(2Hb>U*y4!fGRn)D>*)<#Utj2;+15qp?`6 zuk?ehwwJ$M&*{S5f9^jlOkp%_7tfLxeKFpczQ4TiXV3=M=sC}YQG4>q6E)k+@@(Ej zb1QjMz$^g#$&!o z>^vQyM@ZfX+T5q&+aMt_jd&~H@JZo!BD>p4?~0AQBh_MEoA_F~k2pUPL~kX+sN$~H^}C`i0D zWImU%t+9Bvmt7+R2u;lIVf*Xi%+fu9Bjr&U?OS4Q@eP02Dn`$`FAOO1Z|td=-C8r- zpSVFn?CulaU*R=VK|rxL-ny+ygxDtonEhy>My4f*^kFuK6#_EBJL$%=`f>-AbD&c- zG&H=7!jb|GRF2cAtl+xJ(&LPNZ!%rsOwr@k5piQwt-)&oO%ERX)}!*@l3ovW4$tBDd#SwDlvgo=SMZ;jZ&rNWMRL;aEX^s5YPUjiiyb<;{v-qZ&q-izKjOAT^ zfWxT(58Xp5Zz5KYzNrya%b@tH_)Y8v$m{ zV*qCVn0FH^LuUtT5%;Z#GQ`(Yc{63D#Om;=C(+HLQF#6Elg&fqo1h=<$*>6)cNi}O 
z>iE-BHm%{Yq-hW7sWm=OLev2bb2r@hT|atQHRbe!f%3qC%FA8^8B=Wfq_DGx7bZf< z-C+xP<|+MtuADQVWy5f}Ym!1iE4$aXmIdV31n99{h^{qF)N2>K=FnJ9_bey~5hJe< z#EG14F=yT*MsBZxMhS8$gzoXWF#W>%(4e58hgD`K@1e6WC;%(!d+icp<|28=aZHbAY7ftq4MhzW%g{{+OJKXOFDEQ^b_gBG<{QRdZTYaU22T5$grepcoS^+T9`@>{o} z??ch0unZG2HKoC$pQ;9Tb=!^buh(Xd>A|l8(*Q(uWn}I+JhEvdO^+J>luZw*VimYd zqy!|6bbqm=K3NrsvtxC4kZd`mdKQQK^45q{skjTb>Nm4q5wi4|i65sTX+Pru!N$h? z3Fqy-$F!?Z{#{604h7{{cs&EAfUd;kEOTIXNSiDRy4$|H*(G;f--NAJ#4a%NY1vI) z!9ti<4&Ox~aGKXTqOy)LG!>4~#qDgobiM@xiWcb7$DghZ&)JjYt(9#`Y)=GDJo5{E zrbh8D0Xc>V&-i3|BApkQxb z1VO5jhC<3Vi&UKS1Q=LA3E=w_i$4hLwfgKHFrWH6OS}>5q`4Lhtwmb7Z>bD_x%4yA zc9q*J2Qa&CAigf5tt)i|Gs@Q0)vYW-1D-}2rYKpEcBz}caoPCu#b2D7w#!tl;UcTM z`c|zkohZARIRhS!%Z&fZsKfKWm41j*sAl?a+)r7<6|pVo1qfs#wE zk&cdoocJ&+0vIiTv~*uDfoRdHdHvnbuBfTGg|0^F1!fBNHkLPq&rm2C|2!R4NRwn_ zt0OlFq9V3Wtr$=maBP6(jvYHRHYSa7wJ%~Y($MA*vJ=Kbv=uUx8m@KDQ))&qXF}Hk zn{%pW_-IBH`H>OvTzh!NhYD|Es#L&ovsAuI3;z|akk>L?7aP;*PqJN&HN?B_?6c>9 z%8q;iur)Y3OcLO=+@!=b8qly<1^r_=e|AI`;b1Sz@{W9$jyrJF{h9qYFJSp$t`cHTNSaK)CmUt}qPbG9b3 zf`LYcPN0IU5vf+Y@+FCd2F-v}S~yi}RRw7ga0u*kRLr2HNXEE8<>*K^7JL9NukZYC zZ@2Z_7LvC^Z0Tk^)HUIzs literal 0 HcmV?d00001 From 4d91a42eee3d89c435f5e418f1ed82314132d0b3 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Wed, 10 Jun 2020 17:02:41 +0100 Subject: [PATCH 084/155] Remove duplicate git clone --- .../administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 | 1 - 1 file changed, 1 deletion(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 index bbf24780f9..654111513c 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 @@ -29,7 +29,6 @@ $_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName Add-LogMessage -Level Info "Creating zipfilepath." 
$zipFileName = "${repoName}_${commitHash}_${branchName}.zip" $zipFilePath = Join-Path $PSScriptRoot $zipFileName -$tempDir = New-Item -ItemType Directory -Path (Join-Path ([System.IO.Path]::GetTempPath()) ([System.IO.Path]::GetRandomFileName()) "") Add-LogMessage -Level Info "About to git clone " $tempDir = New-Item -ItemType Directory -Path (Join-Path ([System.IO.Path]::GetTempPath()) ([System.IO.Path]::GetRandomFileName()) "$repoName") From 2f254766041d85d0a7b9370d2a963a1a24eaf115 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Wed, 10 Jun 2020 17:11:47 +0100 Subject: [PATCH 085/155] Name 'reviewUsersGroup' consistently with the other group name variables --- deployment/common/Configuration.psm1 | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/deployment/common/Configuration.psm1 b/deployment/common/Configuration.psm1 index f5bd6e074a..d4a35cd710 100644 --- a/deployment/common/Configuration.psm1 +++ b/deployment/common/Configuration.psm1 @@ -300,7 +300,7 @@ function Add-SreConfig { $serverAdminsGroup = "SG $($config.sre.domain.netbiosName) Server Administrators" $sqlAdminsGroup = "SG $($config.sre.domain.netbiosName) SQL Server Administrators" $researchUsersGroup = "SG $($config.sre.domain.netbiosName) Research Users" - $reviewUsers= "SG $($config.sre.domain.netbiosName) Review Users" + $reviewUsersGroup = "SG $($config.sre.domain.netbiosName) Review Users" $config.sre.domain.securityGroups = [ordered]@{ serverAdmins = [ordered]@{ name = $serverAdminsGroup @@ -315,8 +315,8 @@ function Add-SreConfig { description = $researchUsersGroup } reviewUsers = [ordered]@{ - name = $reviewUsers - description = $reviewUsers + name = $reviewUsersGroup + description = $reviewUsersGroup } } From cead499d00c0a0bd5e5f79e1384a2084f7fa9c52 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Wed, 10 Jun 2020 17:22:34 +0100 Subject: [PATCH 086/155] Add (and use) function for clearing a storage container - name to indicate more clearly that this is 
destructive - don't create the container if it doesn't exist --- .../SRE_Upload_Git_Repo_to_GitlabReview.ps1 | 5 ++++- deployment/common/Deployments.psm1 | 13 ++++++------- .../setup/Setup_SRE_VNET_RDS.ps1 | 3 ++- 3 files changed, 12 insertions(+), 9 deletions(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 index 654111513c..2bb5b7dbba 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 @@ -61,7 +61,10 @@ $sreStorageAccount = Deploy-StorageAccount -Name $sreStorageAccountName -Resourc # Create container if not already there $containerName = $config.sre.storage.artifacts.containers.gitlabAirlockName -$_ = Deploy-EmptyStorageContainer -Name $containerName -StorageAccount $sreStorageAccount + +# Ensure an empty storage container of the given name exists +$_ = Deploy-StorageContainer -Name $containerName -StorageAccount $sreStorageAccount +$_ = Clear-StorageContainer -Name $containerName -StorageAccount $sreStorageAccount # copy zipfile to blob storage # ---------------------------- diff --git a/deployment/common/Deployments.psm1 b/deployment/common/Deployments.psm1 index 7ebfc153c1..be5e5b7f79 100644 --- a/deployment/common/Deployments.psm1 +++ b/deployment/common/Deployments.psm1 @@ -319,17 +319,16 @@ function Deploy-StorageContainer { Export-ModuleMember -Function Deploy-StorageContainer -# Create storage container and ensure it is empty +# Ensure the specified storage container is empty # ----------------------------------------------- -function Deploy-EmptyStorageContainer { +function Clear-StorageContainer { param( - [Parameter(Mandatory = $true, HelpMessage = "Name of storage container to deploy")] + [Parameter(Mandatory = $true, HelpMessage = "Name of storage container to clear")] $Name, - [Parameter(Mandatory = $true, HelpMessage = "Name of storage account to 
deploy into")] + [Parameter(Mandatory = $true, HelpMessage = "Name of storage account where the container exists")] $StorageAccount ) - $_ = Deploy-StorageContainer -Name $Name -StorageAccount $StorageAccount - # delete existing blobs on the container + # delete existing blobs in the container $blobs = @(Get-AzStorageBlob -Container $Name -Context $StorageAccount.Context) $numBlobs = $blobs.Length if ($numBlobs -gt 0) { @@ -346,7 +345,7 @@ function Deploy-EmptyStorageContainer { } } } -Export-ModuleMember -Function Deploy-EmptyStorageContainer +Export-ModuleMember -Function Clear-StorageContainer # Create Linux virtual machine if it does not exist diff --git a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 index 3ce9f165ed..e12a03cf61 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 @@ -212,7 +212,8 @@ Deploy-ArmTemplate -TemplatePath (Join-Path $PSScriptRoot ".." "arm_templates" " # --------------------------------------------- Add-LogMessage -Level Info "Creating blob storage containers in storage account '$($sreStorageAccount.StorageAccountName)'..." 
foreach ($containerName in ($containerNameGateway, $containerNameSessionHosts)) { - Deploy-EmptyStorageContainer -Name $containerName -StorageAccount $sreStorageAccount + Deploy-StorageContainer -Name $containerName -StorageAccount $sreStorageAccount + Clear-StorageContainer -Name $containerName -StorageAccount $sreStorageAccount } From a878316bcf756a41ab846c5a1b5ca539cbfbd716 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Wed, 10 Jun 2020 17:57:44 +0100 Subject: [PATCH 087/155] Rename some variables in GitLab upload powershell script --- .../SRE_Upload_Git_Repo_to_GitlabReview.ps1 | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 index 2bb5b7dbba..5e5f08d479 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 @@ -1,14 +1,14 @@ param( - [Parameter(Mandatory = $true, HelpMessage = "Enter SRE ID (usually a number e.g enter '9' for DSG9)")] + [Parameter(Mandatory = $true, HelpMessage = "Enter SRE ID (usually a number e.g enter '9' for DSG9)")] [string]$sreId, - [Parameter( Mandatory = $true, HelpMessage = "Enter repo URL")] - [string]$repoURL, - [Parameter( Mandatory = $true, HelpMessage = "Enter repo name")] - [string]$repoName, - [Parameter( Mandatory = $true, HelpMessage = "Enter commit hash of the desired commit on external repository")] - [string]$commitHash, - [Parameter( Mandatory = $true, HelpMessage = "Enter desired branch name for the project inside Safe Haven")] - [string]$branchName + [Parameter( Mandatory = $true, HelpMessage = "Enter the git URL of the source repository")] + [string]$sourceGitURL, + [Parameter( Mandatory = $true, HelpMessage = "Enter the name of the repository as it should appear within SRE GITLAB")] + [string]$targetRepoName, + [Parameter( Mandatory = $true, 
HelpMessage = "Enter the full commit hash of the commit in the source repository to snapshot")] + [string]$sourceCommitHash, + [Parameter( Mandatory = $true, HelpMessage = "Enter the desired branch name where the snapshot should be placed (in the repository inside SRE GITLAB)")] + [string]$targetBranchName ) Import-Module Az @@ -27,16 +27,16 @@ $_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName # Create local zip file # --------------------- Add-LogMessage -Level Info "Creating zipfilepath." -$zipFileName = "${repoName}_${commitHash}_${branchName}.zip" +$zipFileName = "${targetRepoName}_${sourceCommitHash}_${targetBranchName}.zip" $zipFilePath = Join-Path $PSScriptRoot $zipFileName Add-LogMessage -Level Info "About to git clone " -$tempDir = New-Item -ItemType Directory -Path (Join-Path ([System.IO.Path]::GetTempPath()) ([System.IO.Path]::GetRandomFileName()) "$repoName") +$tempDir = New-Item -ItemType Directory -Path (Join-Path ([System.IO.Path]::GetTempPath()) ([System.IO.Path]::GetRandomFileName()) "$targetRepoName") -Invoke-Expression -Command "git clone $repoURL $tempDir" +Invoke-Expression -Command "git clone $sourceGitURL $tempDir" $workingDir = Get-Location Set-Location $tempDir -Invoke-Expression -Command "git checkout $commitHash" +Invoke-Expression -Command "git checkout $sourceCommitHash" # Remove the .git directory Remove-Item -Path ".git" -Recurse -Force # Zip this directory From 63e840136457d5e9d16e821f020832550e1e2e84 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Wed, 10 Jun 2020 18:05:10 +0100 Subject: [PATCH 088/155] Remove all default IP addresses in the ARM template for RDS hosts/gateway --- .../arm_templates/sre-rds-template.json | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/deployment/secure_research_environment/arm_templates/sre-rds-template.json b/deployment/secure_research_environment/arm_templates/sre-rds-template.json index 6d61413870..8fd65d7b80 100644 --- 
a/deployment/secure_research_environment/arm_templates/sre-rds-template.json +++ b/deployment/secure_research_environment/arm_templates/sre-rds-template.json @@ -46,7 +46,6 @@ }, "RDS_Gateway_IP_Address": { "type": "string", - "defaultValue": "10.250.x.250", "metadata": { "description": "Enter IP address for RDS Gateway VM, must end in 250" } @@ -78,7 +77,6 @@ }, "RDS_Session_Host_Apps_IP_Address": { "type": "string", - "defaultValue": "10.250.x.249", "metadata": { "description": "Enter IP address for RDS apps VM, must end in 249" } @@ -110,7 +108,6 @@ }, "RDS_Session_Host_Desktop_IP_Address": { "type": "string", - "defaultValue": "10.250.x.248", "metadata": { "description": "Enter IP address for RDS desktop VM, must end in 248" } @@ -142,7 +139,6 @@ }, "RDS_Session_Host_Review_IP_Address": { "type": "string", - "defaultValue": "10.250.x.248", "metadata": { "description": "Enter IP address for RDS review session host VM, must end in 248" } @@ -792,4 +788,4 @@ } }, ] -} \ No newline at end of file +} From 18f37db28c1ec9628f2764d40eb935ce11df0ee5 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 12 Jun 2020 11:23:26 +0100 Subject: [PATCH 089/155] Delete misleading comment --- .../cloud_init/cloud-init-gitlab-review.template.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml index ca208d675a..cc09d7a2ee 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml @@ -2,7 +2,6 @@ package_update: true package_upgrade: true -# Install LDAP tools for debugging LDAP issues packages: - git - apt-transport-https From 6b676741796c80271e3be9ac0ed01ce6c0947cbc Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Fri, 12 Jun 2020 16:22:46 +0100 Subject: [PATCH 
090/155] Exit with status code 1 if any error encountered in check_merge_requests --- .../scripts/check_merge_requests.py | 27 ++++++-- .../scripts/zipfile_to_gitlab_project.py | 6 +- .../setup/Setup_SRE_WebApp_Servers.ps1 | 66 +++++++++---------- 3 files changed, 57 insertions(+), 42 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py index 19a69bc9f0..161a611d92 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py +++ b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py @@ -20,6 +20,7 @@ commit, target project, target branch and target commit. """ +import sys import requests import subprocess from urllib.parse import quote as url_quote @@ -42,6 +43,10 @@ logger.addHandler(f_handler) logger.addHandler(c_handler) +# exit status codes +OK_CODE = 0 # everything ran normally +ERROR_CODE = 1 # logger.error or critical encountered + def check_project_exists(repo_name, config): """Determine whether a repo exist in the ingress namespace on @@ -342,13 +347,14 @@ def check_merge_requests(): gitlab server for users.. 
""" logger.info(f"STARTING RUN") - + return_code = OK_CODE + try: config_gitlabreview = get_api_config(server="GITLAB-REVIEW") config_gitlab = get_api_config(server="GITLAB") except Exception as e: logger.critical(f"Failed to load gitlab secrets: {e}") - return + return ERROR_CODE try: gitlab_status = requests.get( @@ -360,17 +366,17 @@ def check_merge_requests(): logger.critical( f"Gitlab Not Responding: {gitlab_status.status_code}, CONTENT {gitlab_status.content}" ) - return + return ERROR_CODE except Exception as e: logger.critical(f"Gitlab Not Responding: {e}") - return + return ERROR_CODE logger.info("Getting open merge requests for approval") try: merge_requests = get_merge_requests_for_approval(config_gitlabreview) except Exception as e: logger.critical(f"Failed to get merge requests: {e}") - return + return ERROR_CODE logger.info(f"Found {len(merge_requests)} open merge requests") for i, mr in enumerate(merge_requests): @@ -392,6 +398,7 @@ def check_merge_requests(): # Should never get merge conflicts so if we do something has # gone wrong - log an error logger.error(f"Merge Status: {status}") + return_code = ERROR_CODE else: logger.info(f"Merge Status: {status}") wip = mr["work_in_progress"] @@ -404,6 +411,7 @@ def check_merge_requests(): logger.info(f"Downvotes: {downvotes}") except Exception as e: logger.error(f"Failed to extract merge request details: {e}") + return_code = ERROR_CODE continue if ( status == "can_be_merged" @@ -417,6 +425,7 @@ def check_merge_requests(): result = accept_merge_request(mr, config_gitlabreview) except Exception as e: logger.error(f"Merge failed! {e}") + return_code = ERROR_CODE continue if result["state"] == "merged": logger.info(f"Merge successful! 
Merge SHA {result['merge_commit_sha']}") @@ -428,6 +437,7 @@ def check_merge_requests(): ) except Exception as e: logger.error(f"Failed to log accepted merge request: {e}") + return_code = ERROR_CODE try: logger.info("Pushing project to gitlab user server.") update_repo( @@ -438,13 +448,18 @@ def check_merge_requests(): ) except Exception as e: logger.error(f"Failed to push to gitlab user server: {e}") + return_code = ERROR_CODE else: logger.error(f"Merge failed! Merge status is {result['state']}") + return_code = ERROR_CODE else: logger.info("Merge request has not been approved. Skipping.") logger.info(f"RUN FINISHED") logger.info("=" * 30) + + return return_code if __name__ == "__main__": - check_merge_requests() + return_code = check_merge_requests() + sys.exit(return_code) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 02f08399ba..ae4f575153 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -41,7 +41,7 @@ def unzip_zipfiles(zipfile_dir, tmp_unzipped_dir): for zipfile in zipfiles: filename_match = repo_commit_regex.search(zipfile) if not filename_match: - logger.info("Badly named zipfile! {}".format(zipfile)) + logger.error("Badly named zipfile! 
{}".format(zipfile)) continue repo_name, commit_hash, branch = filename_match.groups() @@ -55,7 +55,7 @@ def unzip_zipfiles(zipfile_dir, tmp_unzipped_dir): unpacked_location = os.path.join(tmp_unzipped_dir, unpacked_zips[0]) output_list.append((repo_name, commit_hash, branch, unpacked_location)) except (BadZipFile): - logger.info("Bad zipfile: {}".format(zipfile)) + logger.error("Bad zipfile: {}".format(zipfile)) continue return output_list @@ -348,7 +348,7 @@ def create_merge_request( # response.content)) ##### TEMPORARY - don't raise an error here - we get 500 status code ##### even though MR is created it - under investigation. - logger.info( + logger.error( "Problem creating Merge Request {} {} {}: {}".format( repo_name, source_branch, target_branch, response.content ) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 510e5d86ba..be47f1f537 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -68,17 +68,17 @@ $gitlabUserFilter = "(&(objectClass=user)(memberOf=CN=" + $config.sre.domain.sec $gitlabCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab.template.yaml" | Get-Item | Get-Content -Raw $gitlabCloudInit = $gitlabCloudInitTemplate.Replace('', $shmDcFqdn). Replace('', $gitlabLdapUserDn). - Replace('',$gitlabLdapPassword). - Replace('',$config.shm.domain.userOuPath). - Replace('',$gitlabUserFilter). - Replace('',$config.sre.webapps.gitlab.ip). - Replace('',$config.sre.webapps.gitlab.hostname). - Replace('',$gitlabFqdn). - Replace('',$gitlabRootPassword). - Replace('',$config.shm.domain.fqdn). - Replace('',$gitlabUsername). - Replace('',$gitlabPassword). - Replace('',$gitlabAPIToken) + Replace('', $gitlabLdapPassword). + Replace('', $config.shm.domain.userOuPath). + Replace('', $gitlabUserFilter). 
+ Replace('', $config.sre.webapps.gitlab.ip). + Replace('', $config.sre.webapps.gitlab.hostname). + Replace('', $gitlabFqdn). + Replace('', $gitlabRootPassword). + Replace('', $config.shm.domain.fqdn). + Replace('', $gitlabUsername). + Replace('', $gitlabPassword). + Replace('', $gitlabAPIToken) # Encode as base64 $gitlabCloudInitEncoded = [System.Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($gitlabCloudInit)) @@ -94,12 +94,12 @@ $hackmdCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init $hackmdCloudInit = $hackmdCloudInitTemplate.Replace('', $hackmdLdapUserDn). Replace('', $hackmdLdapPassword). Replace('',$hackmdUserFilter). - Replace('',$config.shm.domain.userOuPath). - Replace('',$config.sre.webapps.hackmd.ip). - Replace('',$config.sre.webapps.hackmd.hostname). - Replace('',$hackmdFqdn). - Replace('',$hackMdLdapUrl). - Replace('',$config.shm.domain.netbiosName) + Replace('', $config.shm.domain.userOuPath). + Replace('', $config.sre.webapps.hackmd.ip). + Replace('', $config.sre.webapps.hackmd.hostname). + Replace('', $hackmdFqdn). + Replace('', $hackMdLdapUrl). + Replace('', $config.shm.domain.netbiosName) # Encode as base64 $hackmdCloudInitEncoded = [System.Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($hackmdCloudInit)) @@ -222,24 +222,24 @@ foreach ($scriptName in @("zipfile_to_gitlab_project.py", } -$gitlabReviewCloudInit = $gitlabReviewCloudInitTemplate.Replace('',$sreAdminUsername). - Replace('',$config.sre.webapps.gitlab.ip). - Replace('',$config.shm.domain.fqdn). - Replace('',$gitlabUsername). - Replace('',$gitlabAPIToken). +$gitlabReviewCloudInit = $gitlabReviewCloudInitTemplate.Replace('', $sreAdminUsername). + Replace('', $config.sre.webapps.gitlab.ip). + Replace('', $config.shm.domain.fqdn). + Replace('', $gitlabUsername). + Replace('', $gitlabAPIToken). Replace('', $shmDcFqdn). Replace('', $gitlabLdapUserDn). - Replace('',$gitlabLdapPassword). - Replace('',$config.shm.domain.userOuPath). 
- Replace('',$gitlabUserFilter). - Replace('',$config.sre.webapps.gitlabreview.ip). - Replace('',$config.sre.webapps.gitlabreview.hostname). - Replace('',$gitlabFqdn). - Replace('',$gitlabRootPassword). - Replace('',$config.shm.domain.fqdn). - Replace('',$gitlabReviewUsername). - Replace('',$gitlabReviewPassword). - Replace('',$gitlabReviewAPIToken) + Replace('', $gitlabLdapPassword). + Replace('', $config.shm.domain.userOuPath). + Replace('', $gitlabUserFilter). + Replace('', $config.sre.webapps.gitlabreview.ip). + Replace('', $config.sre.webapps.gitlabreview.hostname). + Replace('', $gitlabFqdn). + Replace('', $gitlabRootPassword). + Replace('', $config.shm.domain.fqdn). + Replace('', $gitlabReviewUsername). + Replace('', $gitlabReviewPassword). + Replace('', $gitlabReviewAPIToken) $params = @{ Name = $vmNameReview From 60a7ab6afdd7e4b5ec27949341d2ea65961c1889 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Fri, 12 Jun 2020 16:50:30 +0100 Subject: [PATCH 091/155] Remove work in progress check Reviewers do not have permission to add this status so doesn't make sense as part of approval checks. 
--- .../cloud_init/scripts/check_merge_requests.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py index 161a611d92..ff63ca9818 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py +++ b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py @@ -399,10 +399,9 @@ def check_merge_requests(): # gone wrong - log an error logger.error(f"Merge Status: {status}") return_code = ERROR_CODE + continue else: logger.info(f"Merge Status: {status}") - wip = mr["work_in_progress"] - logger.info(f"Work in Progress: {wip}") unresolved = count_unresolved_mr_discussions(mr, config_gitlabreview) logger.info(f"Unresolved Discussions: {unresolved}") upvotes = mr["upvotes"] @@ -414,9 +413,7 @@ def check_merge_requests(): return_code = ERROR_CODE continue if ( - status == "can_be_merged" - and wip is False - and unresolved == 0 + unresolved == 0 and upvotes >= 2 and downvotes == 0 ): From 4e733d57ef63a681cca7c1126a7d228e66e27479 Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Fri, 12 Jun 2020 17:18:10 +0100 Subject: [PATCH 092/155] Add effect of thumbs down to readme docs of approval conditions --- .../cloud_init/scripts/check_merge_requests.py | 2 +- .../cloud_init/scripts/zipfile_to_gitlab_project.py | 12 ++++++++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py index ff63ca9818..46df71d3b5 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py +++ b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py @@ -396,7 +396,7 @@ def check_merge_requests(): status = mr["merge_status"] if 
status != "can_be_merged": # Should never get merge conflicts so if we do something has - # gone wrong - log an error + # gone wrong - log an error and skip this merge request. logger.error(f"Merge Status: {status}") return_code = ERROR_CODE continue diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index ae4f575153..0fbe8148be 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -167,10 +167,14 @@ def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): - Two such approvals are **required** before the merge request will be **automatically merged** and brought into the user-visible GitLab in the Research Environment. -- Any "unresolved threads" will prevent the merge so make sure that - all comment threads in the discussion have been marked as resolved. - -**Important**: Once the repository has had two approvals, the merge +- Any "thumbs down" reactions to the top comment of the Merge Request + will prevent the automated merge. This applies even if there are two + "thumbs up" reactions. +- Any "unresolved threads" will also prevent the merge so make sure + that all comment threads in the discussion have been marked as + resolved once they have been addressed. + +**Important**: Once the conditions above have been met, the merge will be made automatically. This could take up to 10 minutes. There is no need (and you will not have the capability) to merge manually. 
From 1b5f85fc8c9065cf42622f345a9b7104ead4401f Mon Sep 17 00:00:00 2001 From: Jack Roberts Date: Fri, 12 Jun 2020 17:33:19 +0100 Subject: [PATCH 093/155] Give ingress users dummy email addresses rather than using the SHM fqdn --- .../cloud-init-gitlab-review.template.yaml | 15 ++++++++------- .../cloud_init/cloud-init-gitlab.template.yaml | 7 ++++--- .../setup/Setup_SRE_WebApp_Servers.ps1 | 1 - 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml index cc09d7a2ee..f13792ccf4 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml @@ -59,13 +59,13 @@ write_files: "GITLAB": { "ip_address": "", "username": "", - "user_email": "@", + "user_email": "@example.com", "api_token": "" }, "GITLAB-REVIEW": { "ip_address": "", "username": "", - "user_email": "@", + "user_email": "@example.com", "api_token": "" } } @@ -120,12 +120,13 @@ runcmd: # Turn off user account creation - | gitlab-rails runner "ApplicationSetting.last.update_attributes(signup_enabled: false)" - # Restrict login to SHM domain (must be done AFTER GitLab update) - - | - gitlab-rails runner "ApplicationSetting.last.update_attributes(domain_whitelist: [''])" # Create user for ingressing external git repos - | - echo "user = User.create(:username => '', :password => '', :password_confirmation => '', :email =>'@', :skip_confirmation => true, :name => '');user.save!;exit;" | gitlab-rails console -e production + echo "user = User.create(:username => '', :password => '', :password_confirmation => '', :email =>'@example.com', :skip_confirmation => true, :name => '');user.save!;exit;" | gitlab-rails console -e production + + # Restrict login to SHM domain (must be done AFTER GitLab update) + 
- | + gitlab-rails runner "ApplicationSetting.last.update_attributes(domain_whitelist: [''])" # Create a API token for the ingress user created above - | token=$(/home//gitlab_config.py --server GITLAB-REVIEW --value api_token --file /home//.secrets/gitlab-config.json) @@ -192,7 +193,7 @@ runcmd: - | echo "Configuring git" HOME=/home/ git config --global user.name '' - HOME=/home/ git config --global user.email '@' + HOME=/home/ git config --global user.email '@example.com' # -------------------------------- # ADD CRONTAB ENTRIES FOR GITLAB SCRIPTS # -------------------------------- diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml index 8e13dd69b0..9aa2ad1108 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml @@ -79,12 +79,13 @@ runcmd: # Turn off user account creation - | gitlab-rails runner "ApplicationSetting.last.update_attributes(signup_enabled: false)" + # Create user for ingressing external git repos + - | + echo "user = User.create(:username => '', :password => '', :password_confirmation => '', :email =>'@example.com', :skip_confirmation => true, :name => '');user.save!;exit;" | gitlab-rails console -e production + # Restrict login to SHM domain (must be done AFTER GitLab update) - | gitlab-rails runner "ApplicationSetting.last.update_attributes(domain_whitelist: [''])" - # Create user for ingressing external git repos - - | - echo "user = User.create(:username => '', :password => '', :password_confirmation => '', :email =>'@', :skip_confirmation => true, :name => '');user.save!;exit;" | gitlab-rails console -e production # Create a API token for the ingress user created above - | echo "user = User.find_by(username: '');user.personal_access_tokens.create(name: 'apitoken', token_digest: 
Gitlab::CryptoHelper.sha256(''), impersonation: false, scopes: [:api]);exit;" | gitlab-rails console -e production diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index be47f1f537..32375c8b84 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -224,7 +224,6 @@ foreach ($scriptName in @("zipfile_to_gitlab_project.py", $gitlabReviewCloudInit = $gitlabReviewCloudInitTemplate.Replace('', $sreAdminUsername). Replace('', $config.sre.webapps.gitlab.ip). - Replace('', $config.shm.domain.fqdn). Replace('', $gitlabUsername). Replace('', $gitlabAPIToken). Replace('', $shmDcFqdn). From 4a01cd43e00c60a9fbea339220b15d50d1cbd8c0 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 12 Jun 2020 18:35:09 +0100 Subject: [PATCH 094/155] Add default value for targetRepoName based on sourceGitURL --- .../administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 index 5e5f08d479..f907d5ec47 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 @@ -3,8 +3,9 @@ param( [string]$sreId, [Parameter( Mandatory = $true, HelpMessage = "Enter the git URL of the source repository")] [string]$sourceGitURL, - [Parameter( Mandatory = $true, HelpMessage = "Enter the name of the repository as it should appear within SRE GITLAB")] - [string]$targetRepoName, + ## interpret the basename of the final path segment in a (possibly encoded) URI as the name of the repository + [Parameter( Mandatory = $false, HelpMessage = "Enter the name of the repository as it should appear within 
SRE GITLAB (default is the basename of the final path segment of the git URL)")] + [string]$targetRepoName = [uri]::UnescapeDataString((Split-Path -Path ([uri]$sourceGitURL).Segments[-1] -LeafBase)), [Parameter( Mandatory = $true, HelpMessage = "Enter the full commit hash of the commit in the source repository to snapshot")] [string]$sourceCommitHash, [Parameter( Mandatory = $true, HelpMessage = "Enter the desired branch name where the snapshot should be placed (in the repository inside SRE GITLAB)")] From de0c6694683b766efebfc937860351d79b31bf2f Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 12 Jun 2020 19:13:48 +0100 Subject: [PATCH 095/155] Use a temporary container with a unique name to store the repo zipfile upload (then clean up) --- .../administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 index f907d5ec47..de24107c29 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 @@ -60,12 +60,11 @@ $storageResourceGroupName = $config.sre.storage.artifacts.rg $sreStorageAccountName = $config.sre.storage.artifacts.accountName $sreStorageAccount = Deploy-StorageAccount -Name $sreStorageAccountName -ResourceGroupName $storageResourceGroupName -Location $config.sre.location -# Create container if not already there -$containerName = $config.sre.storage.artifacts.containers.gitlabAirlockName +# Create a temporary storage container +$containerName = $config.sre.storage.artifacts.containers.gitlabAirlockName + "-" + [Guid]::NewGuid().ToString() # Ensure an empty storage container of the given name exists $_ = Deploy-StorageContainer -Name $containerName -StorageAccount $sreStorageAccount -$_ = Clear-StorageContainer -Name $containerName -StorageAccount 
$sreStorageAccount # copy zipfile to blob storage # ---------------------------- @@ -99,6 +98,8 @@ $result = Invoke-RemoteScript -Shell "UnixShell" -Script $script -VMName $gitlab Add-LogMessage -Level Info "[ ] Removing original zipfile $zipFilePath" Remove-Item -Path $zipFilePath +# Remove the temporary storage container +Remove-AzStorageContainer -Name $containerName # Switch back to original subscription # ------------------------------------ From c2b01c0c4cfc9905f5b7941bc244536a9d10b9e5 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Fri, 19 Jun 2020 17:08:08 +0100 Subject: [PATCH 096/155] Updated NSG rules. Reordered webapp server deployment. Use a stronger password for HackMD database. --- .../cloud-init-hackmd.template.yaml | 6 +- .../setup/Setup_SRE_WebApp_Servers.ps1 | 149 ++++++++++-------- 2 files changed, 82 insertions(+), 73 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-hackmd.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-hackmd.template.yaml index 12254ee647..3d2ad09c53 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-hackmd.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-hackmd.template.yaml @@ -39,7 +39,7 @@ write_files: # - /tmp:size=256K environment: - POSTGRES_USER=hackmd - - POSTGRES_PASSWORD=hackmdpass + - POSTGRES_PASSWORD= - POSTGRES_DB=hackmd volumes: - database:/var/lib/postgresql/data @@ -62,10 +62,10 @@ write_files: environment: # DB_URL is formatted like: ://:@/ # Other examples are: - # - mysql://hackmd:hackmdpass@database:3306/hackmd + # - mysql://hackmd:@database:3306/hackmd # - sqlite:///data/sqlite.db (NOT RECOMMENDED) # - For details see the official sequelize docs: http://docs.sequelizejs.com/en/v3/ - - CMD_DB_URL=postgres://hackmd:hackmdpass@database:5432/hackmd + - CMD_DB_URL=postgres://hackmd:@database:5432/hackmd - CMD_ALLOW_ANONYMOUS=false - CMD_ALLOW_FREEURL=true - CMD_EMAIL=false diff --git 
a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 32375c8b84..1bfa2b3ff7 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -28,39 +28,56 @@ $gitlabLdapPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.nam $gitlabReviewUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabReviewUsername -DefaultValue "ingress" $gitlabReviewPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabReviewPassword $gitlabReviewAPIToken = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabReviewAPIToken -$hackmdUserPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.hackmdUserPassword +$hackmdPostgresPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.hackmdUserPassword $hackmdLdapPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.hackmdLdapPassword $gitlabUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabUsername -DefaultValue "ingress" $gitlabPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabPassword $gitlabAPIToken = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabAPIToken -# Set up the NSG for the webapps -# ------------------------------ -$nsgGitlab = Deploy-NetworkSecurityGroup -Name $config.sre.webapps.nsg -ResourceGroupName $config.sre.network.vnet.rg 
-Location $config.sre.location -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgGitlab ` +# Set up NSGs for the webapps +# --------------------------- +$nsgWebapps = Deploy-NetworkSecurityGroup -Name $config.sre.webapps.nsg -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location +Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps ` -Name "OutboundDenyInternet" ` -Description "Outbound deny internet" ` -Priority 4000 ` -Direction Outbound -Access Deny -Protocol * ` -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` -DestinationAddressPrefix Internet -DestinationPortRange * +Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` + -Name "OutboundDenyVNet" ` + -Description "Outbound deny VNet connections" ` + -Priority 3000 ` + -Direction Inbound -Access Deny -Protocol * ` + -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` + -DestinationAddressPrefix VirtualNetwork -DestinationPortRange * +$nsgAirlock = Deploy-NetworkSecurityGroup -Name $config.sre.network.nsg.airlock.name -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location +Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` + -Name "InboundAllowReviewServer" ` + -Description "Inbound allow connections from review session host" ` + -Priority 3000 ` + -Direction Inbound -Access Allow -Protocol * ` + -SourceAddressPrefix $config.sre.rds.sessionHost3.ip -SourcePortRange * ` + -DestinationAddressPrefix VirtualNetwork -DestinationPortRange * +Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` + -Name "InboundDenyOtherVNet" ` + -Description "Inbound deny other VNet connections" ` + -Priority 4000 ` + -Direction Inbound -Access Deny -Protocol * ` + -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` + -DestinationAddressPrefix VirtualNetwork -DestinationPortRange * -$nsgGitlabReview = Deploy-NetworkSecurityGroup -Name $config.sre.network.nsg.airlock.name -ResourceGroupName $config.sre.network.vnet.rg 
-Location $config.sre.location - - -# Check that VNET and subnet exist -# -------------------------------- - -$vnet = Deploy-VirtualNetwork -Name $config.sre.network.vnet.Name -ResourceGroupName $config.sre.network.vnet.rg -AddressPrefix $config.sre.network.vnet.cidr -Location $config.sre.location -$subnet = Deploy-Subnet -Name $config.sre.network.subnets.airlock.Name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.airlock.cidr - -Set-SubnetNetworkSecurityGroup -Subnet $subnet -NetworkSecurityGroup $nsgGitlabReview -VirtualNetwork $vnet +# Check that VNET and subnets exist +# --------------------------------- +$vnet = Deploy-VirtualNetwork -Name $config.sre.network.vnet.name -ResourceGroupName $config.sre.network.vnet.rg -AddressPrefix $config.sre.network.vnet.cidr -Location $config.sre.location +$null = Deploy-Subnet -Name $config.sre.network.subnets.data.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.data.cidr +$airlockSubnet = Deploy-Subnet -Name $config.sre.network.subnets.airlock.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.airlock.cidr # Expand GitLab cloudinit -# -------------------------------- +# ----------------------- $shmDcFqdn = ($config.shm.dc.hostname + "." + $config.shm.domain.fqdn) $gitlabFqdn = $config.sre.webapps.gitlab.hostname + "." + $config.sre.domain.fqdn $gitlabLdapUserDn = "CN=" + $config.sre.users.ldap.gitlab.name + "," + $config.shm.domain.serviceOuPath @@ -79,7 +96,6 @@ $gitlabCloudInit = $gitlabCloudInitTemplate.Replace('', $shmDcFq Replace('', $gitlabUsername). Replace('', $gitlabPassword). Replace('', $gitlabAPIToken) - # Encode as base64 $gitlabCloudInitEncoded = [System.Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($gitlabCloudInit)) @@ -99,7 +115,8 @@ $hackmdCloudInit = $hackmdCloudInitTemplate.Replace('', $hackmdL Replace('', $config.sre.webapps.hackmd.hostname). Replace('', $hackmdFqdn). Replace('', $hackMdLdapUrl). 
- Replace('', $config.shm.domain.netbiosName) + Replace('', $config.shm.domain.netbiosName). + Replace('', $hackmdPostgresPassword) # Encode as base64 $hackmdCloudInitEncoded = [System.Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($hackmdCloudInit)) @@ -146,15 +163,13 @@ while (-Not ($gitlabStatuses.Contains("ProvisioningState/succeeded") -and $gitla Start-Sleep 10 } + # While webapp servers are off, ensure they are bound to correct NSG # ------------------------------------------------------------------ Add-LogMessage -Level Info "Ensure webapp servers and compute VMs are bound to correct NSG..." foreach ($vmName in ($config.sre.webapps.hackmd.vmName, $config.sre.webapps.gitlab.vmName)) { - Add-VmToNSG -VMName $vmName -NSGName $nsgGitlab.Name + Add-VmToNSG -VMName $vmName -NSGName $nsgWebapps.Name } -Start-Sleep -Seconds 30 -Add-LogMessage -Level Info "Summary: NICs associated with '$($nsgGitlab.Name)' NSG" -@($nsgGitlab.NetworkInterfaces) | ForEach-Object { Add-LogMessage -Level Info "=> $($_.Id.Split('/')[-1])" } # Reboot the HackMD and Gitlab servers @@ -170,76 +185,72 @@ foreach ($nameVMNameParamsPair in (("HackMD", $config.sre.webapps.hackmd.vmName) } } -# Deploy NIC and data disks for gitlab review -# --------------------------------------------- -$vmNameReview = $config.sre.webapps.gitlabreview.vmName -$vmIpAddress = $config.sre.webapps.gitlabreview.ip -$vmNic = Deploy-VirtualMachineNIC -Name "$vmNameReview-NIC" -ResourceGroupName $config.sre.webapps.rg -Subnet $subnet -PrivateIpAddress $vmIpAddress -Location $config.sre.location - - -# Deploy the GitLab review VM -# ------------------------------ -$bootDiagnosticsAccount = Deploy-StorageAccount -Name $config.sre.storage.bootdiagnostics.accountName -ResourceGroupName $config.sre.storage.bootdiagnostics.rg -Location $config.sre.location - - -$shmDcFqdn = ($config.shm.dc.hostname + "." + $config.shm.domain.fqdn) -$gitlabFqdn = $config.sre.webapps.gitlabreview.hostname + "." 
+ $config.sre.domain.fqdn -$gitlabLdapUserDn = "CN=" + $config.sre.users.ldap.gitlab.name + "," + $config.shm.domain.serviceOuPath -$gitlabUserFilter = "(&(objectClass=user)(memberOf=CN=" + $config.sre.domain.securityGroups.reviewUsers.name + "," + $config.shm.domain.securityOuPath + "))" - -$gitlabReviewCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab-review.template.yaml" | Get-Item | Get-Content -Raw - -# Get public SSH keys from gitlab (so it can be added as a known host on gitlab review) -# ------------------------------ +# Get public SSH keys from the GitLab server +# This allows it to be added as a known host on the GitLab review server +# ---------------------------------------------------------------------- $script = ' #! /bin/bash echo " $(cat /etc/ssh/ssh_host_rsa_key.pub | cut -d " " -f -2)" echo " $(cat /etc/ssh/ssh_host_ed25519_key.pub | cut -d " " -f -2)" echo " $(cat /etc/ssh/ssh_host_ecdsa_key.pub | cut -d " " -f -2)" '.Replace('', $config.sre.webapps.gitlab.ip) -$vmName = $config.sre.webapps.gitlab.vmName -$result = Invoke-RemoteScript -VMName $vmName -ResourceGroupName $config.sre.webapps.rg -Shell "UnixShell" -Script $script +$result = Invoke-RemoteScript -VMName $config.sre.webapps.gitlab.vmName -ResourceGroupName $config.sre.webapps.rg -Shell "UnixShell" -Script $script Add-LogMessage -Level Success "Fetching ssh keys from gitlab succeeded" # Extract everything in between the [stdout] and [stderr] blocks of the result message. i.e. all output of the script. 
$sshKeys = $result.Value[0].Message | Select-String "\[stdout\]\s*([\s\S]*?)\s*\[stderr\]" -$sshKeys = $sshKeys.Matches.Groups[1].Value -# Insert keys into cloud init template, maintaining indentation -$indent = " " -$indented_sshKeys = $sshKeys -split "`n" | ForEach-Object { "${indent}$_" } | Join-String -Separator "`n" -$gitlabReviewCloudInitTemplate = $gitlabReviewCloudInitTemplate.Replace("${indent}", $indented_sshKeys) +# $sshKeys = $sshKeys.Matches.Groups[1].Value -# Insert scripts into the cloud-init template -# ------------------------------------------- +# Deploy NIC and data disks for GitLab review server +# -------------------------------------------------- +$bootDiagnosticsAccount = Deploy-StorageAccount -Name $config.sre.storage.bootdiagnostics.accountName -ResourceGroupName $config.sre.storage.bootdiagnostics.rg -Location $config.sre.location +$vmNameReview = $config.sre.webapps.gitlabreview.vmName +$vmIpAddress = $config.sre.webapps.gitlabreview.ip +$vmNic = Deploy-VirtualMachineNIC -Name "$vmNameReview-NIC" -ResourceGroupName $config.sre.webapps.rg -Subnet $subnet -PrivateIpAddress $vmIpAddress -Location $config.sre.location + + +# Expand GitLab review cloudinit +# ------------------------------ +$gitlabReviewCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab-review.template.yaml" | Get-Item | Get-Content -Raw +$gitlabReviewFqdn = $config.sre.webapps.gitlabreview.hostname + "." 
+ $config.sre.domain.fqdn +$gitlabReviewLdapUserDn = "CN=" + $config.sre.users.ldap.gitlab.name + "," + $config.shm.domain.serviceOuPath +$gitlabReviewUserFilter = "(&(objectClass=user)(memberOf=CN=" + $config.sre.domain.securityGroups.reviewUsers.name + "," + $config.shm.domain.securityOuPath + "))" + +# Insert SSH keys and scripts into cloud init template, maintaining indentation $indent = " " foreach ($scriptName in @("zipfile_to_gitlab_project.py", "check_merge_requests.py", - "gitlab_config.py")) { - $raw_script = Get-Content (Join-Path $PSScriptRoot ".." "cloud_init" "scripts" $scriptName) -Raw + "gitlab_config.py", + "gitlab-ssh-keys")) { + if ($scriptName -eq "gitlab-ssh-keys") { + $raw_script = $sshKeys.Matches.Groups[1].Value + } else { + $raw_script = Get-Content (Join-Path $PSScriptRoot ".." "cloud_init" "scripts" $scriptName) -Raw + } $indented_script = $raw_script -split "`n" | ForEach-Object { "${indent}$_" } | Join-String -Separator "`n" $gitlabReviewCloudInitTemplate = $gitlabReviewCloudInitTemplate.Replace("${indent}<$scriptName>", $indented_script) } - +# Insert other variables into template $gitlabReviewCloudInit = $gitlabReviewCloudInitTemplate.Replace('', $sreAdminUsername). Replace('', $config.sre.webapps.gitlab.ip). Replace('', $gitlabUsername). Replace('', $gitlabAPIToken). Replace('', $shmDcFqdn). - Replace('', $gitlabLdapUserDn). + Replace('', $gitlabReviewLdapUserDn). Replace('', $gitlabLdapPassword). Replace('', $config.shm.domain.userOuPath). - Replace('', $gitlabUserFilter). + Replace('', $gitlabReviewUserFilter). Replace('', $config.sre.webapps.gitlabreview.ip). Replace('', $config.sre.webapps.gitlabreview.hostname). - Replace('', $gitlabFqdn). + Replace('', $gitlabReviewFqdn). Replace('', $gitlabRootPassword). Replace('', $config.shm.domain.fqdn). Replace('', $gitlabReviewUsername). Replace('', $gitlabReviewPassword). 
Replace('', $gitlabReviewAPIToken) - +# Deploy VM and add to correct NSG when done $params = @{ Name = $vmNameReview Size = $config.sre.webapps.gitlabreview.vmSize @@ -254,19 +265,17 @@ $params = @{ ImageSku = "18.04-LTS" } $_ = Deploy-UbuntuVirtualMachine @params +Wait-ForAzVMCloudInit -Name $vmNameReview -ResourceGroupName $config.sre.webapps.rg +Add-VmToNSG -VMName $vmNameReview -NSGName $nsgAirlock +Enable-AzVM -Name $vmNameReview -ResourceGroupName $config.sre.webapps.rg -Add-LogMessage -Level Info "Waiting for cloud-init provisioning to finish (this will take 5+ minutes)..." -$progress = 0 -$gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlabreview.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code -while (-Not ($gitlabStatuses.Contains("ProvisioningState/succeeded") -and $gitlabStatuses.Contains("PowerState/stopped"))) { - $progress = [math]::min(100, $progress + 1) - $gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlabreview.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code - Write-Progress -Activity "Deployment status:" -Status "GitLab Review [$($gitlabStatuses[0]) $($gitlabStatuses[1])]" -PercentComplete $progress - Start-Sleep 10 -} -Add-LogMessage -Level Info "Rebooting the $name VM: '$vmNameReview'" -Enable-AzVM -Name $vmNameReview -ResourceGroupName $config.sre.webapps.rg +# List VMs connected to each NSG +# ------------------------------ +foreach ($nsg in @($nsgWebapps, $nsgAirlock)) { + Add-LogMessage -Level Info "Summary: NICs associated with '$($nsg.Name)' NSG" + @($nsg.NetworkInterfaces) | ForEach-Object { Add-LogMessage -Level Info "=> $($_.Id.Split('/')[-1])" } +} # Switch back to original subscription From ef3ac4302a5b7cfa8f9967d8e55c6b4e3621b590 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Fri, 19 Jun 2020 17:09:55 +0100 Subject: [PATCH 097/155] Added gitlab and hackmd daemon users --- .../cloud_init/cloud-init-gitlab-review.template.yaml | 10 +++++++++- 
.../cloud_init/cloud-init-gitlab.template.yaml | 8 ++++++++ .../cloud_init/cloud-init-hackmd.template.yaml | 8 ++++++++ 3 files changed, 25 insertions(+), 1 deletion(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml index f13792ccf4..6e8225c4a2 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml @@ -89,6 +89,14 @@ write_files: content: | +# Add the SHM admin (default) and gitlabdaemon users +# lock_passwd: Lock the password to disable password login +users: + - default + - name: gitlabdaemon + lock_passwd: True + sudo: False + runcmd: # -------------------------------- # SETUP GITLAB REVIEW SERVER @@ -178,7 +186,7 @@ runcmd: token=$(/home//gitlab_config.py --server GITLAB-REVIEW --value api_token --file /home//.secrets/gitlab-config.json) curl --header "Authorization: Bearer $token" --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"ReviewAPIUser\"}" /api/v4/user/keys # Get local ssh host keys, add them to known hosts under the gitlab review ip - - | + - | echo " $(cat /etc/ssh/ssh_host_rsa_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts echo " $(cat /etc/ssh/ssh_host_ed25519_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts echo " $(cat /etc/ssh/ssh_host_ecdsa_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml index 9aa2ad1108..0fd8e5e605 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml @@ -51,6 +51,14 @@ write_files: EOS 
git_data_dirs({ "default" => { "path" => "/datadrive/gitdata" } }) +# Add the SHM admin (default) and gitlabdaemon users +# lock_passwd: Lock the password to disable password login +users: + - default + - name: gitlabdaemon + lock_passwd: True + sudo: False + runcmd: # Configure server - echo "Configuring server" diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-hackmd.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-hackmd.template.yaml index 3d2ad09c53..4423182abd 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-hackmd.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-hackmd.template.yaml @@ -102,6 +102,14 @@ write_files: database: uploads: +# Add the SHM admin (default) and hackmddaemon users +# lock_passwd: Lock the password to disable password login +users: + - default + - name: hackmddaemon + lock_passwd: True + sudo: False + runcmd: # Configure server - echo "Configuring server" From cf1c8e6163ac6d02828f8f495f8a3c4fe823ddef Mon Sep 17 00:00:00 2001 From: James Robinson Date: Fri, 19 Jun 2020 17:22:00 +0100 Subject: [PATCH 098/155] Updated disk provisioning to native cloud-init syntax --- deployment/common/Deployments.psm1 | 2 +- .../arm_templates/sre-webapps-template.json | 2 +- .../cloud-init-gitlab-review.template.yaml | 25 +++++++++++-------- .../cloud-init-gitlab.template.yaml | 25 +++++++++++-------- 4 files changed, 32 insertions(+), 22 deletions(-) diff --git a/deployment/common/Deployments.psm1 b/deployment/common/Deployments.psm1 index 916738ce04..3856c80c6f 100644 --- a/deployment/common/Deployments.psm1 +++ b/deployment/common/Deployments.psm1 @@ -404,7 +404,7 @@ function Deploy-UbuntuVirtualMachine { # Add optional data disks $lun = 0 foreach ($diskId in $DataDiskIds) { - $lun += 1 + $lun += 1 # NB. this line means that our first disk gets deployed at lun1 and we do not use lun0. Consider changing this. 
$vmConfig = Add-AzVMDataDisk -VM $vmConfig -ManagedDiskId $diskId -CreateOption Attach -Lun $lun } Add-LogMessage -Level Info "[ ] Creating virtual machine '$Name'" diff --git a/deployment/secure_research_environment/arm_templates/sre-webapps-template.json b/deployment/secure_research_environment/arm_templates/sre-webapps-template.json index 52d9b78c88..cdca5b90e3 100644 --- a/deployment/secure_research_environment/arm_templates/sre-webapps-template.json +++ b/deployment/secure_research_environment/arm_templates/sre-webapps-template.json @@ -140,7 +140,7 @@ "diskSizeGB": 50 }, "dataDisks": [{ - "lun": 0, + "lun": 1, "name": "[concat(parameters('GitLab_Server_Name'),'-DATA-DISK')]", "createOption": "Empty", "caching": "None", diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml index 6e8225c4a2..91813cdcd9 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml @@ -22,6 +22,21 @@ apt: source: "deb https://packages.gitlab.com/gitlab/gitlab-ce/ubuntu bionic main" keyid: 3F01618A51312F3F +# We know that exactly one data disk will be attached to this VM and it will be attached as lun1 +disk_setup: + /dev/disk/azure/scsi1/lun1: + table_type: gpt + layout: True + overwrite: True + +fs_setup: + - device: /dev/disk/azure/scsi1/lun1 + partition: 1 + filesystem: ext4 + +mounts: + - [/dev/disk/azure/scsi1/lun1-part1, /datadrive, ext4, "defaults,nofail"] + write_files: # Gitlab server config - path: /etc/gitlab/gitlab.rb @@ -108,16 +123,6 @@ runcmd: - dpkg-reconfigure -f noninteractive tzdata # Set up the data disk - echo "Setting up data disk..." 
- - DEVICE=$(readlink -f /dev/disk/azure/scsi1/lun0) - - parted ${DEVICE} mklabel gpt - - parted ${DEVICE} mkpart primary ext4 0% 100% - - parted ${DEVICE} print - - sleep 5 - - mkfs -t ext4 ${DEVICE}1 - - mkdir -p /datadrive - - mount ${DEVICE}1 /datadrive - - UUID=$(blkid | grep "${DEVICE}1" | cut -d'"' -f2) - - echo "UUID=${UUID}\t/datadrive\text4\tdefaults,nofail\t1\t2" >> /etc/fstab - mkdir -p /datadrive/gitdata # Enable custom GitLab settings and run an initial configuration - echo "Running initial configuration" diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml index 0fd8e5e605..a78d78e933 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml @@ -22,6 +22,21 @@ apt: source: "deb https://packages.gitlab.com/gitlab/gitlab-ce/ubuntu bionic main" keyid: 3F01618A51312F3F +# We know that exactly one data disk will be attached to this VM and it will be attached as lun1 +disk_setup: + /dev/disk/azure/scsi1/lun1: + table_type: gpt + layout: True + overwrite: True + +fs_setup: + - device: /dev/disk/azure/scsi1/lun1 + partition: 1 + filesystem: ext4 + +mounts: + - [/dev/disk/azure/scsi1/lun1-part1, /datadrive, ext4, "defaults,nofail"] + write_files: - path: /etc/gitlab/gitlab.rb permissions: "0600" @@ -67,16 +82,6 @@ runcmd: - dpkg-reconfigure -f noninteractive tzdata # Set up the data disk - echo "Setting up data disk..." 
- - DEVICE=$(readlink -f /dev/disk/azure/scsi1/lun0) - - parted $DEVICE mklabel gpt - - parted $DEVICE mkpart primary ext4 0% 100% - - parted $DEVICE print - - sleep 5 - - mkfs -t ext4 ${DEVICE}1 - - mkdir -p /datadrive - - mount ${DEVICE}1 /datadrive - - UUID=$(blkid | grep "${DEVICE}1" | cut -d'"' -f2) - - echo "UUID=${UUID}\t/datadrive\text4\tdefaults,nofail\t1\t2" >> /etc/fstab - mkdir -p /datadrive/gitdata # Enable custom GitLab settings and run an initial configuration - echo "Running initial configuration" From ad02e4428a13d69df060b716d08ee6e3d64a9a50 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Fri, 19 Jun 2020 17:39:32 +0100 Subject: [PATCH 099/155] Switch to using gitlabdaemon for GitLab review server automation --- .../cloud-init-gitlab-review.template.yaml | 48 +++++++++---------- .../cloud-init-gitlab.template.yaml | 6 +-- 2 files changed, 25 insertions(+), 29 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml index 91813cdcd9..6e1ee16a99 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml @@ -67,7 +67,7 @@ write_files: EOS git_data_dirs({ "default" => { "path" => "/datadrive/gitdata" } }) # Secrets for Gitlab and Review Access, and script to get them - - path: "/home//.secrets/gitlab-config.json" + - path: "/home/gitlabdaemon/.secrets/gitlab-config.json" permissions: "0600" content: | { @@ -84,27 +84,27 @@ write_files: "api_token": "" } } - - path: "/home//gitlab_config.py" + - path: "/home/gitlabdaemon/gitlab_config.py" permissions: "0755" content: | # Script for creating projects and merge requests on gitlab-review - - path: "/home//zipfile_to_gitlab_project.py" + - path: "/home/gitlabdaemon/zipfile_to_gitlab_project.py" permissions: "0755" 
content: | # Script for monitoring and accepting merge requests - - path: "/home//check_merge_requests.py" + - path: "/home/gitlabdaemon/check_merge_requests.py" permissions: "0755" content: | # Populate SSH known hosts with keys from gitlab server - - path: "/home//.ssh/known_hosts" + - path: "/home/gitlabdaemon/.ssh/known_hosts" permissions: "0600" content: | -# Add the SHM admin (default) and gitlabdaemon users +# Add the SRE admin (default) and gitlabdaemon users # lock_passwd: Lock the password to disable password login users: - default @@ -142,7 +142,7 @@ runcmd: gitlab-rails runner "ApplicationSetting.last.update_attributes(domain_whitelist: [''])" # Create a API token for the ingress user created above - | - token=$(/home//gitlab_config.py --server GITLAB-REVIEW --value api_token --file /home//.secrets/gitlab-config.json) + token=$(/home/gitlabdaemon/gitlab_config.py --server GITLAB-REVIEW --value api_token --file /home/gitlabdaemon/.secrets/gitlab-config.json) echo "user = User.find_by(username: '');user.personal_access_tokens.create(name: 'apitoken', token_digest: Gitlab::CryptoHelper.sha256('$token'), impersonation: false, scopes: [:api]);exit;" | gitlab-rails console -e production # Reload GitLab configuration and restart GitLab - gitlab-ctl reconfigure @@ -151,16 +151,16 @@ runcmd: # CREATE SSH KEY # -------------------------------- - | - mkdir -p /home//.ssh - ssh-keygen -t ed25519 -C 'gitlab' -N '' -f /home//.ssh/id_ed25519 + mkdir -p /home/gitlabdaemon/.ssh + ssh-keygen -t ed25519 -C 'gitlab' -N '' -f /home/gitlabdaemon/.ssh/id_ed25519 # -------------------------------- # GITLAB SSH SETUP # -------------------------------- - | - key=$(cat /home//.ssh/id_ed25519.pub) - token=$(/home//gitlab_config.py --server GITLAB --value api_token --file /home//.secrets/gitlab-config.json) + key=$(cat /home/gitlabdaemon/.ssh/id_ed25519.pub) + token=$(/home/gitlabdaemon/gitlab_config.py --server GITLAB --value api_token --file 
/home/gitlabdaemon/.secrets/gitlab-config.json) curl --header "Authorization: Bearer $token" --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"GitlabAPIUser\"}" /api/v4/user/keys - ssh-keyscan -H >> /home//.ssh/known_hosts + ssh-keyscan -H >> /home/gitlabdaemon/.ssh/known_hosts # -------------------------------- # WAIT FOR GITLAB REVIEW HEALTH CHECK # -------------------------------- @@ -187,17 +187,17 @@ runcmd: - echo "Configuring access to gitlab review" # Create SSH key for gitlab review access - | - key=$(cat /home//.ssh/id_ed25519.pub); - token=$(/home//gitlab_config.py --server GITLAB-REVIEW --value api_token --file /home//.secrets/gitlab-config.json) + key=$(cat /home/gitlabdaemon/.ssh/id_ed25519.pub); + token=$(/home/gitlabdaemon/gitlab_config.py --server GITLAB-REVIEW --value api_token --file /home/gitlabdaemon/.secrets/gitlab-config.json) curl --header "Authorization: Bearer $token" --header 'Content-Type:application/json' --data "{\"key\": \"$key\", \"title\": \"ReviewAPIUser\"}" /api/v4/user/keys # Get local ssh host keys, add them to known hosts under the gitlab review ip - | - echo " $(cat /etc/ssh/ssh_host_rsa_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts - echo " $(cat /etc/ssh/ssh_host_ed25519_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts - echo " $(cat /etc/ssh/ssh_host_ecdsa_key.pub | cut -d " " -f -2)" >> /home//.ssh/known_hosts + echo " $(cat /etc/ssh/ssh_host_rsa_key.pub | cut -d " " -f -2)" >> /home/gitlabdaemon/.ssh/known_hosts + echo " $(cat /etc/ssh/ssh_host_ed25519_key.pub | cut -d " " -f -2)" >> /home/gitlabdaemon/.ssh/known_hosts + echo " $(cat /etc/ssh/ssh_host_ecdsa_key.pub | cut -d " " -f -2)" >> /home/gitlabdaemon/.ssh/known_hosts # Create groups for storing unapproved and approved repos - | - token=$(/home//gitlab_config.py --server GITLAB-REVIEW --value api_token --file /home//.secrets/gitlab-config.json) + token=$(/home/gitlabdaemon/gitlab_config.py --server GITLAB-REVIEW 
--value api_token --file /home/gitlabdaemon/.secrets/gitlab-config.json) curl --header "Authorization: Bearer $token" --data "name=approved&path=approved&visibility=internal" /api/v4/groups curl --header "Authorization: Bearer $token" --data "name=unapproved&path=unapproved&visibility=internal" /api/v4/groups # -------------------------------- @@ -205,19 +205,19 @@ runcmd: # -------------------------------- - | echo "Configuring git" - HOME=/home/ git config --global user.name '' - HOME=/home/ git config --global user.email '@example.com' + HOME=/home/gitlabdaemon git config --global user.name '' + HOME=/home/gitlabdaemon git config --global user.email '@example.com' # -------------------------------- # ADD CRONTAB ENTRIES FOR GITLAB SCRIPTS # -------------------------------- - echo "*** Adding zipfile_to_gitlab_project.py to crontab ***" - - echo "*/10 * * * * /home//zipfile_to_gitlab_project.py" >> /etc/crontab + - echo "*/10 * * * * gitlabdaemon /home/gitlabdaemon/zipfile_to_gitlab_project.py" >> /etc/crontab - echo "*** Adding check_merge_requests.py to crontab ***" - - echo "5,15,25,35,45,55 * * * * /home//check_merge_requests.py" >> /etc/crontab + - echo "5,15,25,35,45,55 * * * * gitlabdaemon /home/gitlabdaemon/check_merge_requests.py" >> /etc/crontab # -------------------------------- - # GIVE OWNERSHIP OF THEIR HOME DIRECTORY + # GIVE gitlabdaemon OWNERSHIP OF THEIR HOME DIRECTORY # -------------------------------- - - chown -R : "/home/" + - chown -R gitlabdaemon:gitlabdaemon "/home/gitlabdaemon" # Shutdown so that we can tell when the job has finished by polling the VM state power_state: diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml index a78d78e933..ae088882d8 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml +++ 
b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml @@ -66,13 +66,9 @@ write_files: EOS git_data_dirs({ "default" => { "path" => "/datadrive/gitdata" } }) -# Add the SHM admin (default) and gitlabdaemon users -# lock_passwd: Lock the password to disable password login +# Add the SRE admin (default) user users: - default - - name: gitlabdaemon - lock_passwd: True - sudo: False runcmd: # Configure server From ae42dc7def12c459322811d6f2ae09ba83e48a33 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Fri, 19 Jun 2020 17:40:08 +0100 Subject: [PATCH 100/155] Updated to newer version of HackMD. Ensure that service will restart on reboot and that data is accessible outside the Docker container --- .../arm_templates/sre-webapps-template.json | 18 +++- .../cloud-init-hackmd.template.yaml | 86 +++++++++++-------- 2 files changed, 64 insertions(+), 40 deletions(-) diff --git a/deployment/secure_research_environment/arm_templates/sre-webapps-template.json b/deployment/secure_research_environment/arm_templates/sre-webapps-template.json index cdca5b90e3..9642bc7438 100644 --- a/deployment/secure_research_environment/arm_templates/sre-webapps-template.json +++ b/deployment/secure_research_environment/arm_templates/sre-webapps-template.json @@ -137,7 +137,7 @@ "managedDisk": { "storageAccountType": "Standard_LRS" }, - "diskSizeGB": 50 + "diskSizeGB": 64 }, "dataDisks": [{ "lun": 1, @@ -148,7 +148,7 @@ "managedDisk": { "storageAccountType": "Standard_LRS" }, - "diskSizeGB": 750 + "diskSizeGB": 512 }] }, "osProfile": { @@ -208,9 +208,19 @@ "managedDisk": { "storageAccountType": "Standard_LRS" }, - "diskSizeGB": 750 + "diskSizeGB": 64 }, - "dataDisks": [] + "dataDisks": [{ + "lun": 1, + "name": "[concat(parameters('HackMD_Server_Name'),'-DATA-DISK')]", + "createOption": "Empty", + "caching": "None", + "writeAcceleratorEnabled": false, + "managedDisk": { + "storageAccountType": "Standard_LRS" + }, + "diskSizeGB": 512 + }] }, "osProfile": { 
"computerName": "[parameters('HackMD_Server_Name')]", diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-hackmd.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-hackmd.template.yaml index 4423182abd..a71229cdca 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-hackmd.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-hackmd.template.yaml @@ -22,21 +22,28 @@ apt: source: "deb [arch=amd64] https://download.docker.com/linux/ubuntu bionic stable" keyid: 8D81803C0EBFCD88 +# We know that exactly one data disk will be attached to this VM and it will be attached as lun1 +disk_setup: + /dev/disk/azure/scsi1/lun1: + table_type: gpt + layout: True + overwrite: True + +fs_setup: + - device: /dev/disk/azure/scsi1/lun1 + partition: 1 + filesystem: ext4 + +mounts: + - [/dev/disk/azure/scsi1/lun1-part1, /datadrive, ext4, "defaults,nofail"] + write_files: - path: "/docker-compose-hackmd.yml" content: | version: '3' services: database: - # Don't upgrade PostgreSQL by simply changing the version number - # You need to migrate the Database to the new PostgreSQL version - image: postgres:11.5 - #mem_limit: 256mb # version 2 only - #memswap_limit: 512mb # version 2 only - #read_only: true # not supported in swarm mode please enable along with tmpfs - #tmpfs: - # - /run/postgresql:size=512K - # - /tmp:size=256K + image: postgres:11.6-alpine environment: - POSTGRES_USER=hackmd - POSTGRES_PASSWORD= @@ -48,23 +55,10 @@ write_files: restart: always app: - image: nabo.codimd.dev/hackmdio/hackmd:1.4.1 - #mem_limit: 256mb # version 2 only - #memswap_limit: 512mb # version 2 only - #read_only: true # not supported in swarm mode, enable along with tmpfs - #tmpfs: - # - /tmp:size=512K - # - /hackmd/tmp:size=1M - # Make sure you remove this when you use filesystem as upload type - # - /hackmd/public/uploads:size=10M + image: nabo.codimd.dev/hackmdio/hackmd:2.1.0 volumes: - - 
uploads:/hackmd/public/uploads + - uploads:/home/hackmd/app/public/uploads environment: - # DB_URL is formatted like: ://:@/ - # Other examples are: - # - mysql://hackmd:@database:3306/hackmd - # - sqlite:///data/sqlite.db (NOT RECOMMENDED) - # - For details see the official sequelize docs: http://docs.sequelizejs.com/en/v3/ - CMD_DB_URL=postgres://hackmd:@database:5432/hackmd - CMD_ALLOW_ANONYMOUS=false - CMD_ALLOW_FREEURL=true @@ -78,12 +72,6 @@ write_files: - CMD_LDAP_PROVIDERNAME= - CMD_IMAGE_UPLOAD_TYPE=filesystem ports: - # Ports that are published to the outside. - # The latter port is the port inside the container. It should always stay on 3000 - # If you only specify a port it'll published on all interfaces. If you want to use a - # local reverse proxy, you may want to listen on 127.0.0.1. - # Example: - # - "127.0.0.1:3000:3000" - "3000:3000" networks: backend: @@ -99,8 +87,25 @@ write_files: # Define named volumes so data stays in place volumes: # Volume for PostgreSQL/MySQL database - database: - uploads: + database:/datadrive/database + uploads:/datadrive/hackmd + - path: "/etc/systemd/system/hackmd-app.service" + content: | + [Unit] + Description=HackMD docker compose application service + Requires=docker.service + After=docker.service + + [Service] + Type=oneshot + RemainAfterExit=yes + WorkingDirectory=/srv/docker + ExecStart=/usr/local/bin/docker-compose -f /opt/docker-hackmd/docker-compose.yml up -d + ExecStop=/usr/local/bin/docker-compose -f /opt/docker-hackmd/docker-compose.yml down + TimeoutStartSec=0 + + [Install] + WantedBy=multi-user.target # Add the SHM admin (default) and hackmddaemon users # lock_passwd: Lock the password to disable password login @@ -111,21 +116,30 @@ users: sudo: False runcmd: + # Set up the data disk + - echo "Setting up data disk..." 
+ - mkdir -p /datadrive/database + - mkdir -p /datadrive/hackmd # Configure server - echo "Configuring server" - echo " " >> /etc/hosts - echo "Europe/London" > /etc/timezone - dpkg-reconfigure -f noninteractive tzdata - # Checking Docker status + # Ensure that Docker is running - echo "Current Docker status" - systemctl status docker # Pulling HackMD Docker image - echo "Pulling HackMD Docker image" - - git clone https://github.com/hackmdio/docker-hackmd.git /src/docker-hackmd + - git clone https://github.com/hackmdio/docker-hackmd.git /opt/docker-hackmd - echo "Overwriting HackMD configuration" - - cp /docker-compose-hackmd.yml /src/docker-hackmd/docker-compose.yml + - cp /docker-compose-hackmd.yml /opt/docker-hackmd/docker-compose.yml - echo "Starting HackMD" - - docker-compose -f /src/docker-hackmd/docker-compose.yml up -d + # Ensure that HackMD is running + - echo "Current HackMD status" + - systemctl start hackmd-app + - systemctl enable hackmd-app + - systemctl status hackmd-app + # Shutdown so that we can tell when the job has finished by polling the VM state power_state: From e6716ed9fbe1c1d70824de8249900a7b9fd99bcf Mon Sep 17 00:00:00 2001 From: James Robinson Date: Fri, 19 Jun 2020 17:57:35 +0100 Subject: [PATCH 101/155] Add a rule to allow inbound SSH from VPN admin subnet --- .../setup/Setup_SRE_WebApp_Servers.ps1 | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 1bfa2b3ff7..0ace54380e 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -56,10 +56,17 @@ $nsgAirlock = Deploy-NetworkSecurityGroup -Name $config.sre.network.nsg.airlock. 
Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` -Name "InboundAllowReviewServer" ` -Description "Inbound allow connections from review session host" ` - -Priority 3000 ` + -Priority 2000 ` -Direction Inbound -Access Allow -Protocol * ` -SourceAddressPrefix $config.sre.rds.sessionHost3.ip -SourcePortRange * ` -DestinationAddressPrefix VirtualNetwork -DestinationPortRange * +Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` + -Name "InboundAllowVpnSsh" ` + -Description "Inbound allow SSH connections from VPN subnet" ` + -Priority 3000 ` + -Direction Inbound -Access Allow -Protocol TCP ` + -SourceAddressPrefix "172.16.201.0/24" -SourcePortRange * ` # TODO fix this when this is no longer hard-coded + -DestinationAddressPrefix VirtualNetwork -DestinationPortRange 22 Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` -Name "InboundDenyOtherVNet" ` -Description "Inbound deny other VNet connections" ` From a2bc238b49318a21665356c6714bb8459cbf28b5 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Fri, 19 Jun 2020 18:14:15 +0100 Subject: [PATCH 102/155] Updated NSG rules --- .../setup/Setup_SRE_WebApp_Servers.ps1 | 67 +++++++++++++++---- 1 file changed, 54 insertions(+), 13 deletions(-) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 0ace54380e..e0da99fe48 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -14,7 +14,7 @@ Import-Module $PSScriptRoot/../../common/Security.psm1 -Force # ------------------------------------------------------------ $config = Get-SreConfig $sreId $originalContext = Get-AzContext -$_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName +$null = Set-AzContext -SubscriptionId $config.sre.subscriptionName # Retrieve passwords from the keyvault @@ -38,6 +38,34 
@@ $gitlabAPIToken = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -S # Set up NSGs for the webapps # --------------------------- $nsgWebapps = Deploy-NetworkSecurityGroup -Name $config.sre.webapps.nsg -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location +Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` + -Name "InboundAllowVpnSsh" ` + -Description "Inbound allow SSH connections from VPN subnet" ` + -Priority 1000 ` + -Direction Inbound -Access Allow -Protocol TCP ` + -SourceAddressPrefix "172.16.201.0/24" -SourcePortRange * ` # TODO fix this when this is no longer hard-coded + -DestinationAddressPrefix VirtualNetwork -DestinationPortRange 22 +Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps ` + -Name "InboundAllowHttpSessionHost" ` + -Description "Inbound allow http(s) from application session host" ` + -Priority 2000 ` + -Direction Inbound -Access Allow -Protocol TCP ` + -SourceAddressPrefix $config.sre.rds.sessionHost1.ip -SourcePortRange * ` + -DestinationAddressPrefix VirtualNetwork -DestinationPortRange 80,443 +Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps ` + -Name "InboundAllowHttpComputeSubnet" ` + -Description "Inbound allow http(s) from compute VM subnet" ` + -Priority 3000 ` + -Direction Inbound -Access Allow -Protocol TCP ` + -SourceAddressPrefix $config.sre.network.subnets.data.cidr -SourcePortRange * ` + -DestinationAddressPrefix VirtualNetwork -DestinationPortRange 80,443 +Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` + -Name "InboundDenyOtherVNet" ` + -Description "Inbound deny other VNet connections" ` + -Priority 4000 ` + -Direction Inbound -Access Deny -Protocol * ` + -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` + -DestinationAddressPrefix VirtualNetwork -DestinationPortRange * Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps ` -Name "OutboundDenyInternet" ` -Description "Outbound deny internet" ` @@ -53,34 +81,47 @@ 
Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` -DestinationAddressPrefix VirtualNetwork -DestinationPortRange * $nsgAirlock = Deploy-NetworkSecurityGroup -Name $config.sre.network.nsg.airlock.name -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location +Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` + -Name "InboundAllowVpnSsh" ` + -Description "Inbound allow SSH connections from VPN subnet" ` + -Priority 1000 ` + -Direction Inbound -Access Allow -Protocol TCP ` + -SourceAddressPrefix "172.16.201.0/24" -SourcePortRange * ` # TODO fix this when this is no longer hard-coded + -DestinationAddressPrefix VirtualNetwork -DestinationPortRange 22 Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` -Name "InboundAllowReviewServer" ` -Description "Inbound allow connections from review session host" ` -Priority 2000 ` -Direction Inbound -Access Allow -Protocol * ` -SourceAddressPrefix $config.sre.rds.sessionHost3.ip -SourcePortRange * ` + -DestinationAddressPrefix VirtualNetwork -DestinationPortRange 3389 +Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` + -Name "InboundDenyOtherVNet" ` + -Description "Inbound deny other VNet connections" ` + -Priority 4000 ` + -Direction Inbound -Access Deny -Protocol * ` + -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` -DestinationAddressPrefix VirtualNetwork -DestinationPortRange * Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` - -Name "InboundAllowVpnSsh" ` - -Description "Inbound allow SSH connections from VPN subnet" ` + -Name "OutboundAllowGitLabInternal" ` + -Description "Outbound allow GitLab internal server" ` -Priority 3000 ` - -Direction Inbound -Access Allow -Protocol TCP ` - -SourceAddressPrefix "172.16.201.0/24" -SourcePortRange * ` # TODO fix this when this is no longer hard-coded - -DestinationAddressPrefix VirtualNetwork -DestinationPortRange 22 + 
-Direction Outbound -Access Allow -Protocol * ` + -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` + -DestinationAddressPrefix $config.sre.webapps.gitlab.internal.ip -DestinationPortRange * Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` - -Name "InboundDenyOtherVNet" ` - -Description "Inbound deny other VNet connections" ` + -Name "OutboundDenyVNet" ` + -Description "Outbound deny other VNet connections" ` -Priority 4000 ` - -Direction Inbound -Access Deny -Protocol * ` + -Direction Outbound -Access Deny -Protocol * ` -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` -DestinationAddressPrefix VirtualNetwork -DestinationPortRange * - # Check that VNET and subnets exist # --------------------------------- $vnet = Deploy-VirtualNetwork -Name $config.sre.network.vnet.name -ResourceGroupName $config.sre.network.vnet.rg -AddressPrefix $config.sre.network.vnet.cidr -Location $config.sre.location $null = Deploy-Subnet -Name $config.sre.network.subnets.data.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.data.cidr -$airlockSubnet = Deploy-Subnet -Name $config.sre.network.subnets.airlock.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.airlock.cidr +$null = Deploy-Subnet -Name $config.sre.network.subnets.airlock.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.airlock.cidr # Expand GitLab cloudinit @@ -130,7 +171,7 @@ $hackmdCloudInitEncoded = [System.Convert]::ToBase64String([System.Text.Encoding # Create webapps resource group # -------------------------------- -$_ = Deploy-ResourceGroup -Name $config.sre.webapps.rg -Location $config.sre.location +$null = Deploy-ResourceGroup -Name $config.sre.webapps.rg -Location $config.sre.location # Deploy GitLab/HackMD VMs from template @@ -271,7 +312,7 @@ $params = @{ ResourceGroupName = $config.sre.webapps.rg ImageSku = "18.04-LTS" } -$_ = Deploy-UbuntuVirtualMachine @params +$null = Deploy-UbuntuVirtualMachine @params Wait-ForAzVMCloudInit -Name $vmNameReview -ResourceGroupName
$config.sre.webapps.rg Add-VmToNSG -VMName $vmNameReview -NSGName $nsgAirlock Enable-AzVM -Name $vmNameReview -ResourceGroupName $config.sre.webapps.rg @@ -287,4 +328,4 @@ foreach ($nsg in @($nsgWebapps, $nsgAirlock)) { # Switch back to original subscription # ------------------------------------ -$_ = Set-AzContext -Context $originalContext; +$null = Set-AzContext -Context $originalContext; From 0f512c7c23b8585000d919253188cb339a4d9eb8 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 19 Jun 2020 18:46:23 +0100 Subject: [PATCH 103/155] WIP: refactor gitlab ingress py and ps1 scripts --- .../SRE_Upload_Git_Repo_to_GitlabReview.ps1 | 57 +- .../scripts/check_merge_requests.py | 310 +++++----- .../cloud_init/scripts/gitlab_config.py | 95 ++-- .../cloud_init/scripts/requests_util.py | 8 + .../scripts/zipfile_to_gitlab_project.py | 530 ++++++++---------- 5 files changed, 469 insertions(+), 531 deletions(-) create mode 100644 deployment/secure_research_environment/cloud_init/scripts/requests_util.py diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 index de24107c29..3c34574ba3 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 @@ -27,22 +27,52 @@ $_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName # Create local zip file # --------------------- + +# The zipfile is called "repo.zip", with the following contents: +# +# repo/ +# sourceGitURL +# targetRepoName +# sourceCommitHash +# targetBranchName +# snapshot/ +# ... repository contents + + Add-LogMessage -Level Info "Creating zipfilepath." 
-$zipFileName = "${targetRepoName}_${sourceCommitHash}_${targetBranchName}.zip" -$zipFilePath = Join-Path $PSScriptRoot $zipFileName +## $zipFileName = "${targetRepoName}_${sourceCommitHash}_${targetBranchName}.zip" +$zipFileName = "repo.zip" -Add-LogMessage -Level Info "About to git clone " -$tempDir = New-Item -ItemType Directory -Path (Join-Path ([System.IO.Path]::GetTempPath()) ([System.IO.Path]::GetRandomFileName()) "$targetRepoName") +$tempDir = New-Item -ItemType Directory -Path (Join-Path ([System.IO.Path]::GetTempPath()) ([System.IO.Path]::GetRandomFileName())) -Invoke-Expression -Command "git clone $sourceGitURL $tempDir" +$repoPath = Join-Path $tempDir "repo" +New-Item -ItemType Directory $repoPath + +## $workingDir = Get-Location -Set-Location $tempDir -Invoke-Expression -Command "git checkout $sourceCommitHash" +Set-Location $repoPath + +Add-LogMessage -Level Info "About to git clone " +git clone $sourceGitURL snapshot + +Set-Location "snapshot" + +git checkout $sourceCommitHash # Remove the .git directory Remove-Item -Path ".git" -Recurse -Force -# Zip this directory -if (Test-Path $zipFilePath) { Remove-Item $zipFilePath } -Compress-Archive -CompressionLevel NoCompression -Path $tempDir -DestinationPath $zipFilePath + +## Record some metadata about the repository +Set-Location $repoPath +$sourceGitURL > sourceGitURL +$targetRepoName > targetRepoName +$sourceCommitHash > sourceCommitHash +$targetBranchName > targetBranchName + +# Zip contents and meta +Set-Location $tempDir + +$zipFilePath = Join-Path $tempDir $zipFileName +Compress-Archive -CompressionLevel NoCompression -Path $repoPath -DestinationPath $zipFilePath if ($?) { Add-LogMessage -Level Success "Zip file creation succeeded! 
$zipFilePath" } else { @@ -81,11 +111,12 @@ Add-LogMessage -Level Info "Got SAS token and URL $remoteUrl" $sreAdminUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.Name -SecretName $config.sre.keyVault.secretNames.adminUsername -DefaultValue "sre$($config.sre.id)admin".ToLower() -# Create remote script (make a directory /zfiles/ and run CURL to download blob to there) +# Create remote script (make a subdirectory of /tmp/zipfiles and run CURL to download blob to there) $script = @" #!/bin/bash -mkdir -p /tmp/zipfiles -curl -X GET -o /tmp/zipfiles/${zipFileName} "${remoteUrl}" +mkdir -p /tmp/zipfiles/ +tmpdir=`$(mktemp -d /tmp/zipfiles/XXXXXXXXXXXXXXXXXXXX) +curl -X GET -o `$tmpdir/${zipFileName} "${remoteUrl}" chown -R ${sreAdminUsername}:${sreAdminUsername} /tmp/zipfiles/ "@ diff --git a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py index 46df71d3b5..1994063863 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py +++ b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py @@ -6,18 +6,17 @@ 1) Get open merge requests in the approved group on gitlab review. 2) Check whether any of them meet the approval conditions. By default: status -is can be merged, not flagged as work in progress, no unresolved discussions, -at least two upvotes, and no downvotes. +is can be merged, no unresolved discussions, at least two upvotes, and no downvotes. 3) Accept approved merge requests (merged unapproved repo into approved repo). -4) Push whole approved repo to gitlab , creating the repo if it doesn't +4) Push whole approved repo to gitlab, creating the repo if it doesn't already exist. 
-This script creates two log files in the same directory that it is run from: +This script creates two log files in the directory where it is run: * check_merge_requests.log : A verbose log of the steps performed in each run -and any errors encountered. +and any errors encountered * accepted_merge_requests.log : A list of merge requests that have been accepted in CSV format with columns merged time, source project, source branch, source -commit, target project, target branch and target commit. +commit, target project, target branch and target commit """ import sys @@ -29,6 +28,7 @@ from logging.handlers import RotatingFileHandler from gitlab_config import get_api_config +## # Setup logging to console and file. File uses RotatingFileHandler to create # logs over a rolling window, 10 files each max 5 MB in size. logger = logging.getLogger("merge_requests_logger") @@ -43,9 +43,13 @@ logger.addHandler(f_handler) logger.addHandler(c_handler) -# exit status codes -OK_CODE = 0 # everything ran normally -ERROR_CODE = 1 # logger.error or critical encountered +# Separate logfile for accepted merge requests, no file size limit +accepted_mr_logger = logging.getLogger("accepted_merge_requests_logger") +accepted_mr_logger.setLevel(logging.INFO) +accepted_mr_formatter = logging.Formatter("%(message)s") +accepted_mr_handler = logging.FileHandler("accepted_merge_requests.log") +accepted_mr_handler.setFormatter(accepted_mr_formatter) +accepted_mr_logger.addHandler(accepted_mr_handler) def check_project_exists(repo_name, config): @@ -57,7 +61,7 @@ def check_project_exists(repo_name, config): repo_name : str The name of a repo (not a URL) to search for in the ingress namespace. config : dict - Gitlab details and secrets as returned by get_gitlab_config + Gitlab details and secrets as returned by get_api_config Returns ------- @@ -76,7 +80,8 @@ def check_project_exists(repo_name, config): # Does repo_name exist? 
response = requests.get( - config["api_url"] + "/projects/" + repo_path_encoded, headers=config["headers"] + config["api_url"] + "/projects/" + repo_path_encoded, + headers=config["headers"], ) if response.status_code == 404: @@ -88,7 +93,10 @@ def check_project_exists(repo_name, config): # to raise an exception on unexpected "successful" responses # (not 200) raise requests.HTTPError( - "Unexpected response: " + response.reason + ", content: " + response.text + "Unexpected response: " + + response.reason + + ", content: " + + response.text ) @@ -105,7 +113,7 @@ def update_repo(git_url, repo_name, branch_name, config): repo_name : str Name of repo to create on. config : dict - Details and secrets as returned by get_gitlab_config + Details and secrets as returned by get_api_config """ # clone the repo from git_url (on GITLAB-REVIEW), removing any of @@ -143,70 +151,6 @@ def update_repo(git_url, repo_name, branch_name, config): ) -def get_request(endpoint, headers, params=None): - """Wrapper around requests.get that returns a JSON if request was successful - or raises a HTTPError otherwise. - - Parameters - ---------- - endpoint : str - URL of API endpoint to call - headers : dict - Request headers - params : dict, optional - Request parameters - - Returns - ------- - dict - JSON of request result - - Raises - ------ - requests.HTTPError - If not r.ok, raise HTTPError with details of failure - """ - r = requests.get(endpoint, headers=headers, params=params) - if r.ok: - return r.json() - else: - raise requests.HTTPError( - f"Request failed: URL {endpoint}, CODE {r.status_code}, CONTENT {r.content}" - ) - - -def put_request(endpoint, headers, params=None): - """Wrapper around requests.put that returns a JSON if request was successful - or raises a HTTPError otherwise. 
- - Parameters - ---------- - endpoint : str - URL of API endpoint to call - headers : dict - Request headers - params : dict, optional - Request parameters - - Returns - ------- - dict - JSON of request result - - Raises - ------ - requests.HTTPError - If not r.ok, raise HTTPError with details of failure - """ - r = requests.put(endpoint, headers=headers, params=params) - if r.ok: - return r.json() - else: - raise requests.HTTPError( - f"Request failed: URL {endpoint}, CODE {r.status_code}, CONTENT {r.content}" - ) - - def get_group_id(group_name, config): """Get the ID of a group on a gitlab server. @@ -215,7 +159,7 @@ def get_group_id(group_name, config): group_name : str Group name to find. config : dict - Gitlab details and secrets as returned by get_gitlab_config + Gitlab details and secrets as returned by get_api_config Returns ------- @@ -243,7 +187,7 @@ def get_project(project_id, config): project_id : int ID of the project on the gitlab server. config : dict - Gitlab details and secrets as returned by get_gitlab_config + Gitlab details and secrets as returned by get_api_config Returns ------- @@ -262,7 +206,7 @@ def get_merge_requests_for_approval(config): Parameters ---------- config : dict - Gitlab details and secrets as returned by get_gitlab_config + Gitlab details and secrets as returned by get_api_config Returns ------- @@ -289,7 +233,7 @@ def count_unresolved_mr_discussions(mr, config): mr : dict A merge request JSON as returned by the gitlab API config : dict - Gitlab details and secrets as returned by get_gitlab_config + Gitlab details and secrets as returned by get_api_config Returns ------- @@ -316,7 +260,7 @@ def count_unresolved_mr_discussions(mr, config): return n_unresolved -def accept_merge_request(mr, config): +def accept_merge_request(config, mr): """Accept a merge request Parameters @@ -325,7 +269,7 @@ def accept_merge_request(mr, config): For the merge request to approve: The merge request JSON as returned by the gitlab API. 
config : dict - Gitlab details and secrets as returned by get_gitlab_config + Gitlab details and secrets as returned by get_api_config Returns ------- @@ -336,127 +280,123 @@ def accept_merge_request(mr, config): project_id = mr["project_id"] mr_iid = mr["iid"] endpoint = ( - config["api_url"] + f"/projects/{project_id}/merge_requests/{mr_iid}/merge" + config["api_url"] + + f"/projects/{project_id}/merge_requests/{mr_iid}/merge" ) - return put_request(endpoint, headers=config["headers"]) + response = requests.put(endpoint, headers=config["headers"]) + if response.status_code != 200: + raise http_error("Accepting merge request", response) + + return response.json() + + +def merge_allowed(config_gitlabreview, mr): + unresolved = count_unresolved_mr_discussions(mr, config_gitlabreview) + checks = { + "unresolved_check": unresolved == 0, + "upvotes_check": mr["upvotes"] >= 2, + "downvotes_check": mr["downvotes"] == 0, + } + return (all(checks.values()), checks) -def check_merge_requests(): + +def handle_all_merge_requests(): """Main function to check merge requests in the approved group on gitlab review, approve them where appropriate, and then push the approved repos to the normal - gitlab server for users.. + gitlab server for users. 
""" logger.info(f"STARTING RUN") - return_code = OK_CODE - - try: - config_gitlabreview = get_api_config(server="GITLAB-REVIEW") - config_gitlab = get_api_config(server="GITLAB") - except Exception as e: - logger.critical(f"Failed to load gitlab secrets: {e}") - return ERROR_CODE - try: - gitlab_status = requests.get( - config_gitlab["api_url"] + "/projects", - headers=config_gitlab["headers"], - timeout=10, - ) - if not gitlab_status.ok: - logger.critical( - f"Gitlab Not Responding: {gitlab_status.status_code}, CONTENT {gitlab_status.content}" - ) - return ERROR_CODE - except Exception as e: - logger.critical(f"Gitlab Not Responding: {e}") - return ERROR_CODE + config_gitlabreview = get_api_config(server="GITLAB-REVIEW") + config_gitlab = get_api_config(server="GITLAB") + + response = requests.get( + config_gitlab["api_url"] + "/projects", + headers=config_gitlab["headers"], + timeout=10, + ) + if response.status_code != 200: + raise http_error("Getting project list", response) logger.info("Getting open merge requests for approval") - try: - merge_requests = get_merge_requests_for_approval(config_gitlabreview) - except Exception as e: - logger.critical(f"Failed to get merge requests: {e}") - return ERROR_CODE + + ## TODO throw in get_merge_requests_for_approval + merge_requests = get_merge_requests_for_approval(config_gitlabreview) + logger.info(f"Found {len(merge_requests)} open merge requests") + mr_errors_encountered = 0 for i, mr in enumerate(merge_requests): logger.info("-" * 20) - logger.info(f"Merge request {i+1} out of {len(merge_requests)}") + logger.info(f"Merge request {i+1} of {len(merge_requests)}") + logger.info(f"Checking merge request {mr}") + + if mr["merge_status"] != "can_be_merged": + logger.error( + "This Merge Request's merge status indicates that " + "it cannot be merged. This should never happen. " + "Skipping this MR." 
+ ) + mr_errors_encountered += 1 + continue + try: - # Extract merge request details - source_project = get_project(mr["source_project_id"], config_gitlabreview) - logger.info(f"Source Project: {source_project['name_with_namespace']}") - logger.info(f"Source Branch: {mr['source_branch']}") - target_project = get_project(mr["project_id"], config_gitlabreview) - logger.info(f"Target Project: {target_project['name_with_namespace']}") - target_branch = mr["target_branch"] - logger.info(f"Target Branch: {target_branch}") - logger.info(f"Commit SHA: {mr['sha']}") - logger.info(f"Created At: {mr['created_at']}") - status = mr["merge_status"] - if status != "can_be_merged": - # Should never get merge conflicts so if we do something has - # gone wrong - log an error and skip this merge request. - logger.error(f"Merge Status: {status}") - return_code = ERROR_CODE - continue + can_merge, merge_checks = merge_allowed(config_gitlabreview, mr) + if can_merge: + logger.info("Merge request has been approved. 
Proceeding with merge.") + source_project = get_project(config_gitlabreview, mr["source_project_id"]) + target_project = get_project(config_gitlabreview, mr["project_id"]) + merge_result = accept_merge_request(config_gitlabreview, mr) + + logger.info("Merge completed") + accepted_mr_logger.info( + f"{merge_result['merged_at']}, " + f"{source_project['name_with_namespace']}, " + f"{mr['source_branch']}, " + f"{mr['sha']}, " + f"{target_project['name_with_namespace']}, " + f"{mr['target_branch']}, " + f"{merge_result['merge_commit_sha']}" + ) + + logger.info("Pushing project to gitlab user server.") + update_repo( + config_gitlab + target_project["ssh_url_to_repo"], + target_project["name"], + target_branch, + ) + logger.info("Done") + else: - logger.info(f"Merge Status: {status}") - unresolved = count_unresolved_mr_discussions(mr, config_gitlabreview) - logger.info(f"Unresolved Discussions: {unresolved}") - upvotes = mr["upvotes"] - logger.info(f"Upvotes: {upvotes}") - downvotes = mr["downvotes"] - logger.info(f"Downvotes: {downvotes}") - except Exception as e: - logger.error(f"Failed to extract merge request details: {e}") - return_code = ERROR_CODE + logger.info( + f"Merge request has not been approved: skipping. Reason: {merge_checks}" + ) + + ## Errors from GitLab requests and subprocess + except requests.HTTPError, subprocess.CalledProcessError: + logger.exception( + f"Handling merge request failed for {mr}. Attempting to continue with remaining merge requests." + ) + mr_errors_encountered += 1 continue - if ( - unresolved == 0 - and upvotes >= 2 - and downvotes == 0 - ): - logger.info("Merge request has been approved. Proceeding with merge.") - try: - result = accept_merge_request(mr, config_gitlabreview) - except Exception as e: - logger.error(f"Merge failed! {e}") - return_code = ERROR_CODE - continue - if result["state"] == "merged": - logger.info(f"Merge successful! 
Merge SHA {result['merge_commit_sha']}") - try: - # Save details of accepted merge request to separate log file - with open("accepted_merge_requests.log", "a") as f: - f.write( - f"{result['merged_at']}, {source_project['name_with_namespace']}, {mr['source_branch']}, {mr['sha']}, {target_project['name_with_namespace']}, {mr['target_branch']}, {result['merge_commit_sha']}\n" - ) - except Exception as e: - logger.error(f"Failed to log accepted merge request: {e}") - return_code = ERROR_CODE - try: - logger.info("Pushing project to gitlab user server.") - update_repo( - target_project["ssh_url_to_repo"], - target_project["name"], - target_branch, - config_gitlab, - ) - except Exception as e: - logger.error(f"Failed to push to gitlab user server: {e}") - return_code = ERROR_CODE - else: - logger.error(f"Merge failed! Merge status is {result['state']}") - return_code = ERROR_CODE - else: - logger.info("Merge request has not been approved. Skipping.") - logger.info(f"RUN FINISHED") + + logger.info("RUN FINISHED") logger.info("=" * 30) - - return return_code + + return mr_errors_encountered + if __name__ == "__main__": - return_code = check_merge_requests() + + try: + mr_errors_encountered = handle_all_merge_requests() + except Exception as e: + logger.exception("Error handling merge requests") + raise + + return_code = 0 if mr_errors_encountered == 0 else 1 + sys.exit(return_code) diff --git a/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py index 21f175c585..736521f76a 100755 --- a/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py +++ b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py @@ -4,42 +4,41 @@ import argparse from pathlib import Path - -def get_gitlab_config(file=None, server=None, value=None): - """Get GitLab server details and user secrets. 
- - Parameters - ---------- - file : str, optional - Path to configuration file, by default None which resolves to - .secrets/gitlab-config.json in the user's home directory. - server : str, optional - Name of the server to get details for (must match format in config file), - by default None which returns alls ervers. - value : str, optional - Name of the configuration value to return, by default None which returns - all parameters. - - Returns - ------- - dict or str - If server and value are not None, str of the requested value. If only - server or neither specified, dict of all the relevant values. - """ - if file is None: - file = f"{Path.home()}/.secrets/gitlab-config.json" - - with open(file, "r") as f: - config = json.load(f) - - if server is None and value is None: - return config - elif value is None: - return config[server] - elif server is None: - raise ValueError("If value is given, server must also be given.") - else: - return config[server][value] +# def get_gitlab_config(file=None, server=None, value=None): +# """Get GitLab server details and user secrets. + +# Parameters +# ---------- +# file : str, optional +# Path to configuration file, by default None which resolves to +# .secrets/gitlab-config.json in the user's home directory. +# server : str, optional +# Name of the server to get details for (must match format in config file), +# by default None which returns alls ervers. +# value : str, optional +# Name of the configuration value to return, by default None which returns +# all parameters. + +# Returns +# ------- +# dict or str +# If server and value are not None, str of the requested value. If only +# server or neither specified, dict of all the relevant values. 
+# """ +# if file is None: +# file = f"{Path.home()}/.secrets/gitlab-config.json" + +# with open(file, "r") as f: +# config = json.load(f) + +# if server is None and value is None: +# return config +# elif value is None: +# return config[server] +# elif server is None: +# raise ValueError("If value is given, server must also be given.") +# else: +# return config[server][value] def get_api_config(server, file=None): @@ -58,7 +57,11 @@ def get_api_config(server, file=None): dict Secrets api_url, api_token, ip and headers. """ - config = get_gitlab_config(file=file, server=server, value=None) + if file is None: + file = f"{Path.home()}/.secrets/gitlab-config.json" + + with open(file, "r") as f: + config = json.load(f) ip = config["ip_address"] token = config["api_token"] @@ -68,13 +71,13 @@ def get_api_config(server, file=None): return {"api_url": api_url, "api_token": token, "ip": ip, "headers": headers} -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="Get GitLab configuration values.") - parser.add_argument("--file", help="Location of config file.", default=None) - parser.add_argument( - "--server", help="Name of server to get config for.", default=None - ) - parser.add_argument("--value", help="Configuration value to get.", default=None) - args = parser.parse_args() +# if __name__ == "__main__": +# parser = argparse.ArgumentParser(description="Get GitLab configuration values.") +# parser.add_argument("--file", help="Location of config file.", default=None) +# parser.add_argument( +# "--server", help="Name of server to get config for.", default=None +# ) +# parser.add_argument("--value", help="Configuration value to get.", default=None) +# args = parser.parse_args() - print(get_gitlab_config(file=args.file, server=args.server, value=args.value)) +# print(get_gitlab_config(file=args.file, server=args.server, value=args.value)) diff --git a/deployment/secure_research_environment/cloud_init/scripts/requests_util.py 
b/deployment/secure_research_environment/cloud_init/scripts/requests_util.py new file mode 100644 index 0000000000..2efb64ea0a --- /dev/null +++ b/deployment/secure_research_environment/cloud_init/scripts/requests_util.py @@ -0,0 +1,8 @@ +import requests + + +def http_error(msg, response): +    return requests.HTTPError( +        msg + ": Unexpected response: " + response.reason + " (" +        + str(response.status_code) + "), content: " + response.text +    ) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 0fbe8148be..ec49a2a46e 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -4,12 +4,14 @@ import re import requests import subprocess +import tempfile from zipfile import ZipFile, BadZipFile from urllib.parse import quote as url_quote from pathlib import Path import logging from logging.handlers import RotatingFileHandler from gitlab_config import get_api_config +from requests_util import http_error logger = logging.getLogger("project_upload_logger") logger.setLevel(logging.INFO) @@ -24,108 +26,91 @@ logger.addHandler(c_handler) -def unzip_zipfiles(zipfile_dir, tmp_unzipped_dir): +def unzip_zipfiles(zipfile_dir): output_list = [] - repo_commit_regex = re.compile("([-\w]+)_([a-f\d]+)_([\S]+).zip") - # tear down and recreate the directory where we will put the unpacked zip - shutil.rmtree(tmp_unzipped_dir, ignore_errors=True) - os.makedirs(tmp_unzipped_dir) - # look in a directory for zipfiles try: - zipfiles = os.listdir(zipfile_dir) + zipfile_subdirs = os.listdir(zipfile_dir) except (FileNotFoundError): logger.info( - "Zipfile dir {} not found - assume nothing to unzip".format(zipfile_dir) + f"Zipfile dir {zipfile_dir} not found - assuming nothing to unzip" ) return [] - for zipfile in zipfiles: 
filename_match = repo_commit_regex.search(zipfile) - if not filename_match: - logger.error("Badly named zipfile! {}".format(zipfile)) - continue - repo_name, commit_hash, branch = filename_match.groups() - # unzip + for d in zipfile_subdirs: + zipfile_subdir = os.path.join(zipfile_dir, d) + zipfile_path = os.path.join(zipfile_subdir, "repo.zip") + unpacked_location = os.path.join(zipfile_subdir, "repo") + ## ensure "repo" does not already exist (from a previous failed attempt) + subprocess.run("rm", "-rf", unpacked_location, check=True) try: - zipfile_path = os.path.join(zipfile_dir, zipfile) with ZipFile(zipfile_path, "r") as zip_obj: - zip_obj.extractall(path=tmp_unzipped_dir) - # we should have made a new directory - find its name - unpacked_zips = os.listdir(tmp_unzipped_dir) - unpacked_location = os.path.join(tmp_unzipped_dir, unpacked_zips[0]) - output_list.append((repo_name, commit_hash, branch, unpacked_location)) - except (BadZipFile): - logger.error("Bad zipfile: {}".format(zipfile)) + zip_obj.extractall(path=zipfile_subdir) + + repo_details = [ + Path(os.path.join(unpacked_location, fname)) + .read_text() + .rstrip() + for fname in ( + "targetRepoName", + "sourceCommitHash", + "targetBranchName", + "sourceGitURL", + ) + ] + output_list.append((*repo_details, zipfile_subdir)) + except (BadZipFile, FileNotFoundError, IsADirectoryError) as e: + logger.exception( + f"Error when processing zipfile at {zipfile_subdir}. Continuing with remaining zipfiles." 
+ ) continue return output_list -def get_group_namespace_ids( - gitlab_url, gitlab_token, groups=["approved", "unapproved"] -): - namespaces_url = "{}/namespaces/".format(gitlab_url) - response = requests.get( - namespaces_url, headers={"Authorization": "Bearer " + gitlab_token} - ) +def get_group_namespace_ids(gitlab_config, groups=["approved", "unapproved"]): + namespaces_url = "{}/namespaces/".format(gitlab_config["api_url"]) + response = requests.get(namespaces_url, headers=gitlab_config["headers"]) if response.status_code != 200: - raise RuntimeError( - "Bad request: {} {}".format(response.status_code, response.content) - ) + raise http_error("Geting group namespace ids", response) + gitlab_namespaces = response.json() - namespace_id_dict = {} - for namespace in gitlab_namespaces: - if namespace["kind"] == "group" and namespace["name"] in groups: - namespace_id_dict[namespace["name"]] = namespace["id"] - return namespace_id_dict + + return { + namespace["name"]: namespace["id"] + for namespace in gitlab_namespaces + if namespace["kind"] == "group" and namespace["name"] in groups + } -def get_gitlab_project_list(gitlab_url, gitlab_token): - # list currently existing projects on Gitlab - projects_url = "{}/projects/".format(gitlab_url) +def get_gitlab_project_list(gitlab_config): + projects_url = "{}/projects/".format(gitlab_config["api_url"]) response = requests.get( projects_url, - headers={"Authorization": "Bearer " + gitlab_token}, + headers=gitlab_config["headers"], params={"owned": True, "simple": True}, ) - if response.status_code != 200: - raise RuntimeError( - "Bad request: {} {}".format(response.status_code, response.content) - ) + raise http_error("Getting project list", response) + gitlab_projects = response.json() return gitlab_projects -def check_if_project_exists(repo_name, namespace_id, gitlab_url, gitlab_token): - projects = get_gitlab_project_list(gitlab_url, gitlab_token) +def check_if_project_exists(gitlab_config, repo_name, namespace_id): + 
projects = get_gitlab_project_list(gitlab_config) for project in projects: - if project["name"] == repo_name and project["namespace"]["id"] == namespace_id: - return True + if ( + project["name"] == repo_name + and project["namespace"]["id"] == namespace_id + ): + return project return False -def get_or_create_project(repo_name, namespace_id, gitlab_url, gitlab_token): - already_exists = check_if_project_exists( - repo_name, namespace_id, gitlab_url, gitlab_token - ) - if already_exists: - projects = get_gitlab_project_list(gitlab_url, gitlab_token) - for project_info in projects: - if ( - project_info["name"] == repo_name - and project_info["namespace"]["id"] == namespace_id - ): - return project_info - else: - project_info = create_project(repo_name, namespace_id, gitlab_url, gitlab_token) - return project_info - - -def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): - projects_url = "{}/projects/".format(gitlab_url) +def create_project(gitlab_config, repo_name, namespace_id): + projects_url = "{}/projects/".format(gitlab_config["api_url"]) response = requests.post( projects_url, - headers={"Authorization": "Bearer " + gitlab_token}, + headers=gitlab_config["headers"], data={ "name": repo_name, "path": repo_name, @@ -134,7 +119,10 @@ def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): "default_branch": "_gitlab_ingress_review", }, ) - assert response.json()["name"] == repo_name + + if response.status_code != 200: + raise http_error("Creating project", response) + project_info = response.json() logger.info( "Created project {} in namespace {}, project_id {}".format( @@ -190,221 +178,195 @@ def create_project(repo_name, namespace_id, gitlab_url, gitlab_token): commit history. 
""" # Make the first commit to the project with the README - project_commit_url = ( - f"{gitlab_url}/projects/{project_info['id']}/repository/commits" + project_commit_url = "{}/projects/{}/repository/commits".format( + gitlab_config["api_url"], project_info["id"] ) - response = requests.post( + response_commit = requests.post( project_commit_url, - headers={"Authorization": "Bearer " + gitlab_token}, + headers=gitlab_config["headers"], json={ "branch": "_gitlab_ingress_review", "commit_message": "Initial commit", "actions": [ - {"action": "create", "file_path": "README.md", "content": README} + { + "action": "create", + "file_path": "README.md", + "content": README, + } ], }, ) + + if response_commit.status_code != 201: + raise http_error("Making first commit to project", response_commit) + return project_info -def check_if_branch_exists(branch_name, project_id, gitlab_url, gitlab_token): - branches_url = "{}/projects/{}/repository/branches".format(gitlab_url, project_id) - response = requests.get( - branches_url, headers={"Authorization": "Bearer " + gitlab_token} +def get_or_create_project(gitlab_config, repo_name, namespace_id): + project = check_if_project_exists(gitlab_config, repo_name, namespace_id) + if project: + return project + else: + return create_project(gitlab_config, repo_name, namespace_id) + + +def check_if_branch_exists(gitlab_config, branch_name, project_id): + branches_url = "{}/projects/{}/repository/branches".format( + gitlab_config["api_url"], project_id ) + response = requests.get(branches_url, headers=gitlab_config["headers"]) if response.status_code != 200: - raise RuntimeError( - "Unable to check for branch {} on project {}: {}".format( - branch_name, project_id, r.content - ) + raise http_error( + f"Checking for branch {branch_name} on project {project_id}", + response, ) + branches = response.json() for branch_info in branches: if branch_info["name"] == branch_name: - return True + return branch_info return False -def create_branch( 
+def create_branch_if_not_exists( + gitlab_config, branch_name, project_id, - gitlab_url, - gitlab_token, reference_branch="_gitlab_ingress_review", ): - # assume branch doesn't already exist - create it! - branch_url = "{}/projects/{}/repository/branches".format(gitlab_url, project_id) + branch = check_if_branch_exists(gitlab_config, branch_name, project_id) + if branch: + logger.info(f"Branch {branch_name} exists for project {project_id}") + return branch + + ## Branch does not exist + branch_url = "{}/projects/{}/repository/branches".format( + gitlab_config["api_url"], project_id + ) response = requests.post( branch_url, - headers={"Authorization": "Bearer " + gitlab_token}, + headers=gitlab_config["headers"], data={"branch": branch_name, "ref": reference_branch}, ) if response.status_code != 201: - raise RuntimeError( - "Problem creating branch {}: {}".format(branch_name, response.content) - ) + raise http_error(f"Creating branch {branch_name}", response) + + logger.info(f"Branch {branch_name} created for project {project_id}") + branch_info = response.json() - assert branch_info["name"] == branch_name return branch_info -def create_branch_if_not_exists( - branch_name, - project_id, - gitlab_url, - gitlab_token, - log_project_info, - reference_branch="_gitlab_ingress_review", +def check_if_merge_request_exists( + gitlab_config, + source_project_id, + source_branch, + target_project_id, + target_branch, ): - branch_exists = check_if_branch_exists( - branch_name, - project_id, - gitlab_url, - gitlab_token, + mr_url = "{}/projects/{}/merge_requests".format( + gitlab_config["api_url"], target_project_id ) - if not branch_exists: - branch_info = create_branch( - branch_name, - project_id, - gitlab_url, - gitlab_token, - reference_branch, - ) - assert branch_info["name"] == branch_name - logger.info( - "{} branch {} created".format( - log_project_info, branch_name - ) - ) - else: - logger.info( - "{} branch {} already exists".format( - log_project_info, branch_name - 
) - ) - + response = requests.get(mr_url, headers=gitlab_config["headers"]) + if response.status_code != 200: + raise http_error("Request to check existence of MR failed", response) + merge_requests = response.json() -def check_if_merge_request_exists( - source_branch, target_project_id, target_branch, gitlab_url, gitlab_token -): - mr_url = "{}/projects/{}/merge_requests".format(gitlab_url, target_project_id) - response = requests.get(mr_url, headers={"Authorization": "Bearer " + gitlab_token}) - if response.status_code != 200: - raise RuntimeError( - "Request to check existence of MR failed: {} {}".format( - response.status_code, response.content - ) - ) - for mr in response.json(): + ## return the (known unique) merge request if found, otherwise False + for mr in merge_requests: if ( mr["source_branch"] == source_branch and mr["target_branch"] == target_branch + and mr["source_project_id"] == source_project_id ): - logger.info( - "Merge request {} -> {} already exists".format( - source_branch, target_branch - ) - ) return mr return False -def create_merge_request( +def create_merge_request_if_not_exists( + gitlab_config, repo_name, source_project_id, source_branch, target_project_id, target_branch, - gitlab_url, - gitlab_token, ): - # first need to create a forked-from relationship between the projects - fork_url = "{}/projects/{}/fork/{}".format( - gitlab_url, source_project_id, target_project_id + ## Check whether requested MR exists, return it if it does + + mr = check_if_merge_request_exists( + gitlab_config, + source_project_id, + source_branch, + target_project_id, + target_branch, ) - response = requests.post( - fork_url, headers={"Authorization": "Bearer " + gitlab_token} + if mr: + logger.info(f"Merge Request for {repo_name} already exists") + return mr + + ## Ensure fork relationship is established + + fork_url = "{}/projects/{}/fork/{}".format( + gitlab_config["api_url"], source_project_id, target_project_id ) + response = requests.post(fork_url, 
headers=gitlab_config["headers"]) + # status code 201 if fork relationship created, or 409 if already there if (response.status_code != 201) and (response.status_code != 409): - raise RuntimeError( - "Unable to create fork relationship: {} {}".format( - response.status_code, response.content - ) + raise http_error( + f"Creating fork request between projects {source_project_id} and {target_project_id}", + response, ) - mr_url = "{}/projects/{}/merge_requests".format(gitlab_url, source_project_id) - title = "{}: {} to {}".format(repo_name, source_branch, target_branch) + mr_url = "{}/projects/{}/merge_requests".format( + gitlab_config["api_url"], source_project_id, + ) response = requests.post( mr_url, - headers={"Authorization": "Bearer " + gitlab_token}, + headers=gitlab_config["headers"], data={ "source_branch": source_branch, "target_branch": target_branch, "target_project_id": target_project_id, - "title": title, + "title": f"{repo_name}: {source_branch} to {target_branch}", }, ) - if (response.status_code != 201): - # raise RuntimeError("Problem creating Merge Request {} {} {}: {}"\ - # .format(repo_name, source_branch,target_branch, - # response.content)) - ##### TEMPORARY - don't raise an error here - we get 500 status code - ##### even though MR is created it - under investigation. 
- logger.error( - "Problem creating Merge Request {} {} {}: {}".format( - repo_name, source_branch, target_branch, response.content - ) - ) - return {} - mr_info = response.json() - return mr_info - -def create_merge_request_if_not_exists( - repo_name, - source_project_id, - source_branch, - target_project_id, - target_branch, - gitlab_url, - gitlab_token, -): - mr_exists = check_if_merge_request_exists( + ## Check explicitly whether merge request exists, since sometimes + ## a 500 status is spuriously returned + mr = check_if_merge_request_exists( + gitlab_config, + source_project_id, source_branch, target_project_id, target_branch, - gitlab_url, - gitlab_token ) - if mr_exists: - logger.info( - "Merge Request for {} {} to {} already exists".format( - repo_name, source_branch, target_branch - ) - ) - mr_info = mr_exists - else: - mr_info = create_merge_request( - repo_name, - source_project_id, - source_branch, - target_project_id, - target_branch, - gitlab_url, - gitlab_token - ) - logger.info( - "Created merge request {} -> {}".format(source_branch, target_branch) + if mr and response.status_code == 500: + logger.error( + f"Response 500 ({response.reason}) returned when creating merge request for {repo_name}, although the merge request was created. This may not signal a genuine problem." 
) + return mr + + elif response.status_code != 201: + raise http_error(f"Creating merge request for {repo_name}", response) + + logger.info(f"Created merge request {source_branch} -> {target_branch}") + mr_info = response.json() return mr_info def clone_commit_and_push( - repo_name, path_to_unzipped_repo, tmp_repo_dir, branch_name, - target_branch_name, remote_url, target_project_url, commit_hash + repo_name, + path_to_unzipped_repo, + tmp_repo_dir, + branch_name, + target_branch_name, + remote_url, + target_project_url, + commit_hash, ): # Clone the repo subprocess.run(["git", "clone", remote_url], cwd=tmp_repo_dir, check=True) @@ -412,17 +374,25 @@ def clone_commit_and_push( assert os.path.exists(working_dir) # Add upstream (target repo) to this repo - subprocess.run(["git", "remote", "add", "approved", target_project_url], cwd=working_dir, check=True) + subprocess.run( + ["git", "remote", "add", "approved", target_project_url], + cwd=working_dir, + check=True, + ) subprocess.run(["git", "fetch", "approved"], cwd=working_dir, check=True) # Checkout the branch with the requested name (creating it at the # current commit of the default branch if it doesn't exist) - git_checkout_result = subprocess.run(["git", "checkout", target_branch_name], cwd=working_dir) + git_checkout_result = subprocess.run( + ["git", "checkout", target_branch_name], cwd=working_dir + ) if git_checkout_result.returncode == 0: subprocess.run(["git", "pull", "approved"], cwd=working_dir, check=True) # now checkout the branch holding the snapshot - subprocess.run(["git", "checkout", "-b", branch_name], cwd=working_dir, check=True) + subprocess.run( + ["git", "checkout", "-b", branch_name], cwd=working_dir, check=True + ) # Remove the contents of the cloned repo (everything except .git) for item in os.listdir(working_dir): @@ -439,8 +409,12 @@ def clone_commit_and_push( # Commit everything to this branch, also putting commit hash into message subprocess.run(["git", "add", "."], cwd=working_dir, 
check=True) - commit_msg = "Import snapshot of {} at commit {}".format(remote_url, commit_hash) - subprocess.run(["git", "commit", "-m", commit_msg], cwd=working_dir, check=True) + commit_msg = "Import snapshot of {} at commit {}".format( + remote_url, commit_hash + ) + subprocess.run( + ["git", "commit", "-m", commit_msg], cwd=working_dir, check=True + ) # Push back to gitlab review (unapproved) subprocess.run( ["git", "push", "-f", "--set-upstream", "origin", branch_name], @@ -451,54 +425,68 @@ def clone_commit_and_push( logger.info("Pushed to {} branch {}".format(remote_url, branch_name)) -def fork_project(repo_name, project_id, namespace_id, gitlab_url, gitlab_token): +def fork_project(gitlab_config, repo_name, project_id, namespace_id): already_exists = check_if_project_exists( - repo_name, namespace_id, gitlab_url, gitlab_token + gitlab_config, + repo_name, + namespace_id, + gitlab_config["api_url"], + gitlab_config["api_token"], ) if not already_exists: - fork_url = "{}/projects/{}/fork".format(gitlab_url, project_id) + fork_url = "{}/projects/{}/fork".format( + gitlab_config["api_url"], project_id + ) response = requests.post( fork_url, - headers={"Authorization": "Bearer " + gitlab_token}, + headers=gitlab_config["headers"], data={"namespace_id": namespace_id}, ) if response.status_code != 201: - raise RuntimeError("Problem creating fork: {}".format(response.content)) - new_project_info = response.json()#["id"] + raise http_error("Forking project {project_id}", response) + + new_project_info = response.json() else: - # project already exists - ensure it is a fork of 'approved/' + # project already exists - ensure it is a fork of + # 'approved/' new_project_info = get_or_create_project( - repo_name, namespace_id, gitlab_url, gitlab_token + gitlab_config, + repo_name, + namespace_id, + gitlab_config["api_url"], + gitlab_config["api_token"], ) new_project_id = new_project_info["id"] fork_url = "{}/projects/{}/fork/{}".format( - gitlab_url, new_project_id, 
project_id - ) - response = requests.post( - fork_url, headers={"Authorization": "Bearer " + gitlab_token} + gitlab_config["api_url"], new_project_id, project_id ) + response = requests.post(fork_url, headers=gitlab_config["headers"],) # status code 201 if fork relationship created, or 409 if already there if (response.status_code != 201) and (response.status_code != 409): - raise RuntimeError( - "Unable to create fork relationship: {} {}".format( - response.status_code, response.content - ) - ) + raise http_error("Creating fork relationship", response) + return new_project_info def unzipped_snapshot_to_merge_request( - shapshot_details, tmp_repo_dir, gitlab_config, namespace_ids, group_names + gitlab_config, snapshot_details, namespace_ids ): # unpack tuple - repo_name, commit_hash, target_branch_name, unzipped_location = shapshot_details - logger.info("Unpacked {} {} {}".format(repo_name, commit_hash, target_branch_name)) - # create project on approved repo if not already there - this func will do that - target_project_info = get_or_create_project( + ( repo_name, - namespace_ids[group_names[1]], - gitlab_config["api_url"], - gitlab_config["api_token"], + commit_hash, + target_branch_name, + source_git_url, + snapshot_path, + ) = snapshot_details + logger.info( + "Unpacked {} {} {}".format(repo_name, commit_hash, target_branch_name) + ) + + unzipped_location = os.path.join(snapshot_path, "repo") + + target_project_info = get_or_create_project( + gitlab_config, repo_name, namespace_ids["approved"], ) target_project_id = target_project_info["id"] target_project_url = target_project_info["ssh_url_to_repo"] @@ -510,14 +498,13 @@ def unzipped_snapshot_to_merge_request( # Fork this project to "unapproved" group src_project_info = fork_project( + gitlab_config, repo_name, target_project_id, - namespace_ids[group_names[0]], - gitlab_config["api_url"], - gitlab_config["api_token"], + namespace_ids["unapproved"], ) - src_project_id = src_project_info['id'] - remote_url = 
src_project_info['ssh_url_to_repo'] + src_project_id = src_project_info["id"] + remote_url = src_project_info["ssh_url_to_repo"] logger.info("Fork of project at {}/{}".format(group_names[0], repo_name)) # Do the command-line git stuff to push to unapproved project @@ -534,76 +521,45 @@ def unzipped_snapshot_to_merge_request( # Create the branch on the "approved" project if it doesn't already exist create_branch_if_not_exists( + gitlab_config, target_branch_name, target_project_id, - gitlab_config["api_url"], - gitlab_config["api_token"], - "{} / {}".format(group_names[1], repo_name), ## for logging + "{} / {}".format(group_names[1], repo_name), ## for logging ) # Create the merge request create_merge_request_if_not_exists( + gitlab_config, repo_name, src_project_id, src_branch_name, target_project_id, target_branch_name, - gitlab_config["api_url"], - gitlab_config["api_token"], ) - -def cleanup(zipfile_dir, tmp_unzipped_dir, tmp_repo_dir): - logger.info(" === cleaning up ======") - shutil.rmtree(tmp_unzipped_dir, ignore_errors=True) - logger.info("Removed directory {}".format(tmp_unzipped_dir)) - shutil.rmtree(tmp_repo_dir, ignore_errors=True) - logger.info("Removed directory {}".format(tmp_repo_dir)) - try: - for filename in os.listdir(zipfile_dir): - filepath = os.path.join(zipfile_dir, filename) - subprocess.run(["rm", "-f", filepath], check=True) - logger.info("Removed file {}".format(filepath)) - except (FileNotFoundError): - logger.info("Zipfile directory {} not found - skipping".format(zipfile_dir)) - return True + # cleanup this zipfile and its extracted contents + subprocess.run("rm", "-rf", snapshot_path, check=True) def main(): - ZIPFILE_DIR = "/tmp/zipfiles" - os.makedirs(ZIPFILE_DIR, exist_ok=True) - # create a directory to unpack the zipfiles into - TMP_UNZIPPED_DIR = "/tmp/unzipped" - shutil.rmtree(TMP_UNZIPPED_DIR, ignore_errors=True) - os.makedirs(TMP_UNZIPPED_DIR) - # and a directory where we will clone projects, then copy file contents in - 
TMP_REPO_DIR = "/tmp/repos" - shutil.rmtree(TMP_REPO_DIR, ignore_errors=True) - os.makedirs(TMP_REPO_DIR) - + zipfile_dir = "/tmp/zipfiles" # get the gitlab config - config = get_api_config("GITLAB-REVIEW") + gitlab_config = get_api_config("GITLAB-REVIEW") # unzip the zipfiles, and retrieve a list of tuples describing - # (repo_name, commit_hash, desired_branch, unzipped_location) - unzipped_snapshots = unzip_zipfiles(ZIPFILE_DIR, TMP_UNZIPPED_DIR) + # (repo_name, commit_hash, desired_branch, source_git_url, snapshot_path) + unzipped_snapshots = unzip_zipfiles(zipfile_dir) - # get the namespace_ids of our "approved" and "unapproved" groups - GROUPS = ["unapproved", "approved"] + gitlab_ingress_groups = ["unapproved", "approved"] namespace_ids = get_group_namespace_ids( - config["api_url"], config["api_token"], GROUPS + gitlab_config, gitlab_ingress_groups ) - # loop over all our newly unzipped repositories for snapshot_details in unzipped_snapshots: - # call function to go through all the project/branch/mr creation etc. 
            unzipped_snapshot_to_merge_request( - snapshot_details, TMP_REPO_DIR, config, namespace_ids, GROUPS + gitlab_config, snapshot_details, namespace_ids ) - # cleanup - cleanup(ZIPFILE_DIR, TMP_UNZIPPED_DIR, TMP_REPO_DIR) - if __name__ == "__main__": main() From edbcc1f0208c5447c3f55fc6e03e5a5e6dcbe205 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 25 Jun 2020 16:03:06 +0100 Subject: [PATCH 104/155] Combine review steps scripts in crontab; enforce a single concurrent run --- .../cloud_init/cloud-init-gitlab-review.template.yaml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml index 6e1ee16a99..bc949fb7d5 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml @@ -210,10 +210,8 @@ runcmd: # -------------------------------- # ADD CRONTAB ENTRIES FOR GITLAB SCRIPTS # -------------------------------- - - echo "*** Adding zipfile_to_gitlab_project.py to crontab ***" - - echo "*/10 * * * * gitlabdaemon /home/gitlabdaemon/zipfile_to_gitlab_project.py" >> /etc/crontab - - echo "*** Adding check_merge_requests.py to crontab ***" - - echo "5,15,25,35,45,55 * * * * gitlabdaemon /home/gitlabdaemon/check_merge_requests.py" >> /etc/crontab + - echo "*** Adding review_steps.sh to crontab ***" + - echo "*/10 * * * * gitlabdaemon flock -n /tmp/review_steps.lock /home/gitlabdaemon/review_steps.sh" >> /etc/crontab # -------------------------------- # GIVE gitlabdaemon OWNERSHIP OF THEIR HOME DIRECTORY # -------------------------------- From cde38a977a97bca5bbca45adf37849e2589904a5 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 25 Jun 2020 16:03:51 +0100 Subject: [PATCH 105/155] Move utilities to gitlab_config.py ---
.../cloud_init/scripts/gitlab_config.py | 112 ++++++++++-------- .../cloud_init/scripts/requests_util.py | 8 -- 2 files changed, 64 insertions(+), 56 deletions(-) delete mode 100644 deployment/secure_research_environment/cloud_init/scripts/requests_util.py diff --git a/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py index 736521f76a..f384e85247 100755 --- a/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py +++ b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py @@ -1,45 +1,15 @@ #!/usr/bin/env python3 import json -import argparse from pathlib import Path +from urllib.parse import quote as url_quote +import requests -# def get_gitlab_config(file=None, server=None, value=None): -# """Get GitLab server details and user secrets. - -# Parameters -# ---------- -# file : str, optional -# Path to configuration file, by default None which resolves to -# .secrets/gitlab-config.json in the user's home directory. -# server : str, optional -# Name of the server to get details for (must match format in config file), -# by default None which returns alls ervers. -# value : str, optional -# Name of the configuration value to return, by default None which returns -# all parameters. - -# Returns -# ------- -# dict or str -# If server and value are not None, str of the requested value. If only -# server or neither specified, dict of all the relevant values. 
-# """ -# if file is None: -# file = f"{Path.home()}/.secrets/gitlab-config.json" - -# with open(file, "r") as f: -# config = json.load(f) - -# if server is None and value is None: -# return config -# elif value is None: -# return config[server] -# elif server is None: -# raise ValueError("If value is given, server must also be given.") -# else: -# return config[server][value] - +def http_error(msg, response): + return requests.HTTPError( + msg + ": Unexpected response: " + response.reason + " (" + + response.status_code + "), content: " + response.text + ) def get_api_config(server, file=None): """Construct API URL, headers and other settings. @@ -63,21 +33,67 @@ def get_api_config(server, file=None): with open(file, "r") as f: config = json.load(f) - ip = config["ip_address"] - token = config["api_token"] + ip = config[server]["ip_address"] + token = config[server]["api_token"] api_url = f"http://{ip}/api/v4" headers = {"Authorization": "Bearer " + token} return {"api_url": api_url, "api_token": token, "ip": ip, "headers": headers} -# if __name__ == "__main__": -# parser = argparse.ArgumentParser(description="Get GitLab configuration values.") -# parser.add_argument("--file", help="Location of config file.", default=None) -# parser.add_argument( -# "--server", help="Name of server to get config for.", default=None -# ) -# parser.add_argument("--value", help="Configuration value to get.", default=None) -# args = parser.parse_args() +def get_project_by_id(config, project_id): + """Get the details of a project from its ID. + + Parameters + ---------- + project_id : int + ID of the project on the gitlab server. + config : dict + Gitlab details and secrets as returned by get_api_config + + Returns + ------- + dict + Project JSON as returned by the gitlab API. 
+ """ + endpoint = config["api_url"] + f"/projects/{project_id}" + response = requests.get(endpoint, headers=config["headers"]) + + if response.status_code != 200: + raise http_error("Getting project", response) + + return response.json() + + +def get_project(config, namespace, repo_name): + # build url-encoded repo_name + repo_path_encoded = url_quote(namespace + "/" + repo_name, safe="") + + # Does repo_name exist? + response = requests.get( + config["api_url"] + "/projects/" + repo_path_encoded, + headers=config["headers"], + ) + + if response.status_code == 404: + return False + elif response.status_code == 200: + # The json response body is never empty for a project that + # exists (and so is "truthy") + return response.json() + else: + # Not using `response.raise_for_status()`, since we also want + # to raise an exception on unexpected "successful" responses + # (not 200) + raise http_error("Getting project", response) + + +def get_group_ids(gitlab_config): + groups_url = "{}/groups/".format(gitlab_config["api_url"]) + response = requests.get(groups_url, headers=gitlab_config["headers"]) + if response.status_code != 200: + raise http_error("Geting group namespace ids", response) + + gitlab_groups = response.json() -# print(get_gitlab_config(file=args.file, server=args.server, value=args.value)) + return {group["name"]: group["id"] for group in gitlab_groups} diff --git a/deployment/secure_research_environment/cloud_init/scripts/requests_util.py b/deployment/secure_research_environment/cloud_init/scripts/requests_util.py deleted file mode 100644 index 2efb64ea0a..0000000000 --- a/deployment/secure_research_environment/cloud_init/scripts/requests_util.py +++ /dev/null @@ -1,8 +0,0 @@ -import requests - - -def http_error(msg, response): - return requests.HTTPError( - msg + ": Unexpected response: " + response.reason + " (" - + response.status_code + "), content: " + resonse.text - ) From 58114093e3c002dce62f431a891796c0038f50d4 Mon Sep 17 00:00:00 2001 From: 
Oliver Strickson Date: Thu, 25 Jun 2020 16:05:08 +0100 Subject: [PATCH 106/155] WIP: continue refactor of gitlab ingress scripts --- .../scripts/check_merge_requests.py | 229 +++++------------ .../scripts/zipfile_to_gitlab_project.py | 241 ++++++------------ 2 files changed, 155 insertions(+), 315 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py index 1994063863..9b4df3d262 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py +++ b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py @@ -20,13 +20,11 @@ """ import sys -import requests import subprocess -from urllib.parse import quote as url_quote -from pathlib import Path import logging from logging.handlers import RotatingFileHandler -from gitlab_config import get_api_config +import requests +import gitlab_config as gl ## # Setup logging to console and file. File uses RotatingFileHandler to create @@ -52,54 +50,6 @@ accepted_mr_logger.addHandler(accepted_mr_handler) -def check_project_exists(repo_name, config): - """Determine whether a repo exist in the ingress namespace on - gitlab server defined by config. - - Parameters - ---------- - repo_name : str - The name of a repo (not a URL) to search for in the ingress namespace. - config : dict - Gitlab details and secrets as returned by get_api_config - - Returns - ------- - tuple - (exists, url) tuple where exists: boolean - does repo_name exist, - and url: str - the ssh url to the repo (when 'exists' is true) - - Raises - ------ - requests.HTTPError - If API request returns an unexpected code (not 404 or 200) - """ - - # build url-encoded repo_name - repo_path_encoded = url_quote("ingress/" + repo_name, safe="") - - # Does repo_name exist? 
- response = requests.get( - config["api_url"] + "/projects/" + repo_path_encoded, - headers=config["headers"], - ) - - if response.status_code == 404: - return (False, "") - elif response.status_code == 200: - return (True, response.json()["ssh_url_to_repo"]) - else: - # Not using `response.raise_for_status()`, since we also want - # to raise an exception on unexpected "successful" responses - # (not 200) - raise requests.HTTPError( - "Unexpected response: " - + response.reason - + ", content: " - + response.text - ) - - def update_repo(git_url, repo_name, branch_name, config): """Takes a git URL, `git_url`, which should be the SSH URL to the "APPROVED" repo on GITLAB-REVIEW, clones it and pushes all branches to @@ -123,80 +73,31 @@ def update_repo(git_url, repo_name, branch_name, config): subprocess.run(["git", "clone", git_url, repo_name], check=True) subprocess.run(["git", "checkout", branch_name], cwd=repo_name, check=True) - project_exists, gl_update_repo_url = check_project_exists(repo_name, config) + maybe_project = gl.get_project(config, "ingress", repo_name) # create the project if it doesn't exist - if not project_exists: - print("Creating: " + repo_name) + if maybe_project: + update_repo_url = maybe_project["ssh_url_to_repo"] + else: + logger.info("Creating: %s", repo_name) + response = requests.post( config["api_url"] + "/projects", headers=config["headers"], data={"name": repo_name, "path": repo_name, "visibility": "public"}, ) - response.raise_for_status() - assert response.json()["path_with_namespace"] == "ingress/" + repo_name - gl_update_repo_url = response.json()["ssh_url_to_repo"] + update_repo_url = response.json()["ssh_url_to_repo"] # Set the remote subprocess.run( - ["git", "remote", "add", "gitlab", gl_update_repo_url], + ["git", "remote", "add", "gitlab", update_repo_url], cwd=repo_name, check=True, ) # Force push current contents of all branches - subprocess.run( - ["git", "push", "--force", "gitlab"], cwd=repo_name, check=True - ) - - 
-def get_group_id(group_name, config): - """Get the ID of a group on a gitlab server. - - Parameters - ---------- - group_name : str - Group name to find. - config : dict - Gitlab details and secrets as returned by get_api_config - - Returns - ------- - int - Group ID for group_name - - Raises - ------ - ValueError - If group_name not found in the groups returned from the gitlab server. - """ - endpoint = config["api_url"] + "/groups" - response = get_request(endpoint, headers=config["headers"]) - for group in response: - if group["name"] == group_name: - return group["id"] - raise ValueError(f"{group_name} not found in groups.") - - -def get_project(project_id, config): - """Get the details of a project from its ID. - - Parameters - ---------- - project_id : int - ID of the project on the gitlab server. - config : dict - Gitlab details and secrets as returned by get_api_config - - Returns - ------- - dict - Project JSON as returned by the gitlab API. - """ - endpoint = config["api_url"] + f"/projects/{project_id}" - project = get_request(endpoint, headers=config["headers"]) - return project + subprocess.run(["git", "push", "--force", "gitlab"], cwd=repo_name, check=True) def get_merge_requests_for_approval(config): @@ -213,18 +114,22 @@ def get_merge_requests_for_approval(config): list List of merge requests JSONs as returned by the gitlab API. 
""" - group = get_group_id("approved", config) + all_groups = gl.get_group_ids(config) + group = all_groups["approved"] endpoint = config["api_url"] + f"/groups/{group}/merge_requests" - response = get_request( + response = requests.get( endpoint, headers=config["headers"], - params={"state": "opened", "scope": "created_by_me"}, + data={"state": "opened", "scope": "created_by_me"}, ) - return response + if response.status_code != 200: + raise gl.http_error("Getting merge requests for approval", response) + + return response.json() -def count_unresolved_mr_discussions(mr, config): - """Count the number of unresolved discussions a merge request has. Requires +def unresolved_mr_discussions(config, mr): + """Does merge request `mr` have any unresolved discussions? Requires calling the discussions API endpoint for the merge request to determine each comment's resolved status. @@ -237,8 +142,7 @@ def count_unresolved_mr_discussions(mr, config): Returns ------- - int - Number of unresolved discussions. + bool : does mr have any unresolved discussions? 
""" if mr["user_notes_count"] == 0: return 0 @@ -248,16 +152,17 @@ def count_unresolved_mr_discussions(mr, config): config["api_url"] + f"/projects/{project_id}/merge_requests/{mr_iid}/discussions" ) - discussions = get_request(endpoint, headers=config["headers"]) - if len(discussions) == 0: - return 0 - else: - n_unresolved = 0 - for d in discussions: - for n in d["notes"]: - if n["resolvable"] is True and n["resolved"] is False: - n_unresolved += 1 - return n_unresolved + response = requests.get(endpoint, headers=config["headers"]) + if response.status_code != 200: + raise gl.http_error("Getting unresolved merge request discussions", response) + + discussions = response.json() + + for d in discussions: + for n in d["notes"]: + if n["resolvable"] is True and n["resolved"] is False: + return True + return False def accept_merge_request(config, mr): @@ -286,15 +191,15 @@ def accept_merge_request(config, mr): response = requests.put(endpoint, headers=config["headers"]) if response.status_code != 200: - raise http_error("Accepting merge request", response) + raise gl.http_error("Accepting merge request", response) return response.json() def merge_allowed(config_gitlabreview, mr): - unresolved = count_unresolved_mr_discussions(mr, config_gitlabreview) + unresolved = unresolved_mr_discussions(config_gitlabreview, mr) checks = { - "unresolved_check": unresolved == 0, + "unresolved_check": not unresolved, "upvotes_check": mr["upvotes"] >= 2, "downvotes_check": mr["downvotes"] == 0, } @@ -306,10 +211,10 @@ def handle_all_merge_requests(): approve them where appropriate, and then push the approved repos to the normal gitlab server for users. 
""" - logger.info(f"STARTING RUN") + logger.info("STARTING RUN") - config_gitlabreview = get_api_config(server="GITLAB-REVIEW") - config_gitlab = get_api_config(server="GITLAB") + config_gitlabreview = gl.get_api_config(server="GITLAB-REVIEW") + config_gitlab = gl.get_api_config(server="GITLAB") response = requests.get( config_gitlab["api_url"] + "/projects", @@ -317,20 +222,20 @@ def handle_all_merge_requests(): timeout=10, ) if response.status_code != 200: - raise http_error("Getting project list", response) + raise gl.http_error("Getting project list", response) logger.info("Getting open merge requests for approval") ## TODO throw in get_merge_requests_for_approval merge_requests = get_merge_requests_for_approval(config_gitlabreview) - logger.info(f"Found {len(merge_requests)} open merge requests") + logger.info("Found %s open merge requests", len(merge_requests)) mr_errors_encountered = 0 for i, mr in enumerate(merge_requests): logger.info("-" * 20) - logger.info(f"Merge request {i+1} of {len(merge_requests)}") - logger.info(f"Checking merge request {mr}") + logger.info("Merge request %s of %s", i+1, len(merge_requests)) + logger.info("Checking merge request %s", mr) if mr["merge_status"] != "can_be_merged": logger.error( @@ -345,39 +250,42 @@ def handle_all_merge_requests(): can_merge, merge_checks = merge_allowed(config_gitlabreview, mr) if can_merge: logger.info("Merge request has been approved. 
Proceeding with merge.") - source_project = get_project(config_gitlabreview, mr["source_project_id"]) - target_project = get_project(config_gitlabreview, mr["project_id"]) + source_project = gl.get_project_by_id(config_gitlabreview, mr["source_project_id"]) + target_project = gl.get_project_by_id(config_gitlabreview, mr["project_id"]) merge_result = accept_merge_request(config_gitlabreview, mr) logger.info("Merge completed") accepted_mr_logger.info( - f"{merge_result['merged_at']}, " - f"{source_project['name_with_namespace']}, " - f"{mr['source_branch']}, " - f"{mr['sha']}, " - f"{target_project['name_with_namespace']}, " - f"{mr['target_branch']}, " - f"{merge_result['merge_commit_sha']}" + "%s, %s, %s, %s, %s, %s, %s", + merge_result['merged_at'], + source_project['name_with_namespace'], + mr['source_branch'], + mr['sha'], + target_project['name_with_namespace'], + mr['target_branch'], + merge_result['merge_commit_sha'], ) logger.info("Pushing project to gitlab user server.") update_repo( - config_gitlab + config_gitlab, target_project["ssh_url_to_repo"], target_project["name"], - target_branch, + mr['target_branch'], ) logger.info("Done") else: logger.info( - f"Merge request has not been approved: skipping. Reason: {merge_checks}" + "Merge request has not been approved: skipping. Reason: %s", merge_checks ) ## Errors from GitLab requests and subprocess - except requests.HTTPError, subprocess.CalledProcessError: + except (requests.HTTPError, subprocess.CalledProcessError): logger.exception( - f"Handling merge request failed for {mr}. Attempting to continue with remaining merge requests." + "Handling merge request failed for %s. 
Attempting to continue " + "with remaining merge requests.", + mr ) mr_errors_encountered += 1 continue @@ -388,15 +296,18 @@ def handle_all_merge_requests(): return mr_errors_encountered - -if __name__ == "__main__": - +def main(): try: - mr_errors_encountered = handle_all_merge_requests() - except Exception as e: + mr_errors = handle_all_merge_requests() + except Exception: logger.exception("Error handling merge requests") raise - return_code = 0 if mr_errors_encountered == 0 else 1 + return_code = 0 if mr_errors == 0 else 1 + + return return_code - sys.exit(return_code) + +if __name__ == "__main__": + exit_status = main() + sys.exit(exit_status) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index ec49a2a46e..a1cfd54d6b 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -1,17 +1,18 @@ #!/usr/bin/env python3 + +# pylint: disable=C0330 +# pylint: disable=C0114 +# pylint: disable=C0116 +# pylint: disable=C0103 + import os -import shutil -import re -import requests import subprocess -import tempfile from zipfile import ZipFile, BadZipFile -from urllib.parse import quote as url_quote from pathlib import Path import logging from logging.handlers import RotatingFileHandler -from gitlab_config import get_api_config -from requests_util import http_error +import requests +import gitlab_config as gl logger = logging.getLogger("project_upload_logger") logger.setLevel(logging.INFO) @@ -30,10 +31,8 @@ def unzip_zipfiles(zipfile_dir): output_list = [] try: zipfile_subdirs = os.listdir(zipfile_dir) - except (FileNotFoundError): - logger.info( - f"Zipfile dir {zipfile_dir} not found - assuming nothing to unzip" - ) + except FileNotFoundError: + logger.info("Zipfile dir %s not found - assuming 
nothing to unzip", zipfile_dir) return [] for d in zipfile_subdirs: @@ -47,9 +46,7 @@ def unzip_zipfiles(zipfile_dir): zip_obj.extractall(path=zipfile_subdir) repo_details = [ - Path(os.path.join(unpacked_location, fname)) - .read_text() - .rstrip() + Path(os.path.join(unpacked_location, fname)).read_text().rstrip() for fname in ( "targetRepoName", "sourceCommitHash", @@ -58,54 +55,15 @@ def unzip_zipfiles(zipfile_dir): ) ] output_list.append((*repo_details, zipfile_subdir)) - except (BadZipFile, FileNotFoundError, IsADirectoryError) as e: + except (BadZipFile, FileNotFoundError, IsADirectoryError): logger.exception( - f"Error when processing zipfile at {zipfile_subdir}. Continuing with remaining zipfiles." + "Error when processing zipfile at %s. Continuing with remaining zipfiles.", + zipfile_subdir ) continue return output_list -def get_group_namespace_ids(gitlab_config, groups=["approved", "unapproved"]): - namespaces_url = "{}/namespaces/".format(gitlab_config["api_url"]) - response = requests.get(namespaces_url, headers=gitlab_config["headers"]) - if response.status_code != 200: - raise http_error("Geting group namespace ids", response) - - gitlab_namespaces = response.json() - - return { - namespace["name"]: namespace["id"] - for namespace in gitlab_namespaces - if namespace["kind"] == "group" and namespace["name"] in groups - } - - -def get_gitlab_project_list(gitlab_config): - projects_url = "{}/projects/".format(gitlab_config["api_url"]) - response = requests.get( - projects_url, - headers=gitlab_config["headers"], - params={"owned": True, "simple": True}, - ) - if response.status_code != 200: - raise http_error("Getting project list", response) - - gitlab_projects = response.json() - return gitlab_projects - - -def check_if_project_exists(gitlab_config, repo_name, namespace_id): - projects = get_gitlab_project_list(gitlab_config) - for project in projects: - if ( - project["name"] == repo_name - and project["namespace"]["id"] == namespace_id - ): - return 
project - return False - - def create_project(gitlab_config, repo_name, namespace_id): projects_url = "{}/projects/".format(gitlab_config["api_url"]) response = requests.post( @@ -121,13 +79,14 @@ def create_project(gitlab_config, repo_name, namespace_id): ) if response.status_code != 200: - raise http_error("Creating project", response) + raise gl.http_error("Creating project", response) project_info = response.json() logger.info( - "Created project {} in namespace {}, project_id {}".format( - repo_name, namespace_id, project_info["id"] - ) + "Created project %s in namespace %s, project_id %s", + repo_name, + namespace_id, + project_info["id"], ) # make the initial commit of README initialized with some instructions README = f""" @@ -188,27 +147,22 @@ def create_project(gitlab_config, repo_name, namespace_id): "branch": "_gitlab_ingress_review", "commit_message": "Initial commit", "actions": [ - { - "action": "create", - "file_path": "README.md", - "content": README, - } + {"action": "create", "file_path": "README.md", "content": README,} ], }, ) if response_commit.status_code != 201: - raise http_error("Making first commit to project", response_commit) + raise gl.http_error("Making first commit to project", response_commit) return project_info -def get_or_create_project(gitlab_config, repo_name, namespace_id): - project = check_if_project_exists(gitlab_config, repo_name, namespace_id) +def get_or_create_project(config, namespace_ids, namespace, repo_name): + project = gl.get_project(config, namespace, repo_name) if project: return project - else: - return create_project(gitlab_config, repo_name, namespace_id) + return create_project(config, repo_name, namespace_ids[namespace]) def check_if_branch_exists(gitlab_config, branch_name, project_id): @@ -217,9 +171,8 @@ def check_if_branch_exists(gitlab_config, branch_name, project_id): ) response = requests.get(branches_url, headers=gitlab_config["headers"]) if response.status_code != 200: - raise http_error( - 
f"Checking for branch {branch_name} on project {project_id}", - response, + raise gl.http_error( + f"Checking for branch {branch_name} on project {project_id}", response, ) branches = response.json() @@ -230,14 +183,11 @@ def check_if_branch_exists(gitlab_config, branch_name, project_id): def create_branch_if_not_exists( - gitlab_config, - branch_name, - project_id, - reference_branch="_gitlab_ingress_review", + gitlab_config, branch_name, project_id, reference_branch="_gitlab_ingress_review", ): branch = check_if_branch_exists(gitlab_config, branch_name, project_id) if branch: - logger.info(f"Branch {branch_name} exists for project {project_id}") + logger.info("Branch %s exists for project %s", branch_name, project_id) return branch ## Branch does not exist @@ -250,27 +200,23 @@ def create_branch_if_not_exists( data={"branch": branch_name, "ref": reference_branch}, ) if response.status_code != 201: - raise http_error(f"Creating branch {branch_name}", response) + raise gl.http_error(f"Creating branch {branch_name}", response) - logger.info(f"Branch {branch_name} created for project {project_id}") + logger.info("Branch %s created for project %s", branch_name, project_id) branch_info = response.json() return branch_info def check_if_merge_request_exists( - gitlab_config, - source_project_id, - source_branch, - target_project_id, - target_branch, + gitlab_config, source_project_id, source_branch, target_project_id, target_branch, ): mr_url = "{}/projects/{}/merge_requests".format( gitlab_config["api_url"], target_project_id ) response = requests.get(mr_url, headers=gitlab_config["headers"]) if response.status_code != 200: - raise http_error("Request to check existence of MR failed", response) + raise gl.http_error("Request to check existence of MR failed", response) merge_requests = response.json() @@ -303,7 +249,7 @@ def create_merge_request_if_not_exists( target_branch, ) if mr: - logger.info(f"Merge Request for {repo_name} already exists") + logger.info("Merge 
Request for %s already exists", repo_name) return mr ## Ensure fork relationship is established @@ -315,7 +261,7 @@ def create_merge_request_if_not_exists( # status code 201 if fork relationship created, or 409 if already there if (response.status_code != 201) and (response.status_code != 409): - raise http_error( + raise gl.http_error( f"Creating fork request between projects {source_project_id} and {target_project_id}", response, ) @@ -346,20 +292,22 @@ def create_merge_request_if_not_exists( if mr and response.status_code == 500: logger.error( - f"Response 500 ({response.reason}) returned when creating merge request for {repo_name}, although the merge request was created. This may not signal a genuine problem." + "Response 500 (%s) returned when creating merge request for %s, although " + "the merge request was created. This may not signal a genuine problem.", + response.reason, + repo_name ) return mr - elif response.status_code != 201: - raise http_error(f"Creating merge request for {repo_name}", response) + if response.status_code != 201: + raise gl.http_error(f"Creating merge request for {repo_name}", response) - logger.info(f"Created merge request {source_branch} -> {target_branch}") + logger.info("Created merge request %s -> %s", source_branch, target_branch) mr_info = response.json() return mr_info def clone_commit_and_push( - repo_name, path_to_unzipped_repo, tmp_repo_dir, branch_name, @@ -369,8 +317,8 @@ def clone_commit_and_push( commit_hash, ): # Clone the repo - subprocess.run(["git", "clone", remote_url], cwd=tmp_repo_dir, check=True) - working_dir = os.path.join(tmp_repo_dir, repo_name) + subprocess.run(["git", "clone", remote_url, "cloned_repo"], cwd=tmp_repo_dir, check=True) + working_dir = os.path.join(tmp_repo_dir, "cloned_repo") assert os.path.exists(working_dir) # Add upstream (target repo) to this repo @@ -381,18 +329,15 @@ def clone_commit_and_push( ) subprocess.run(["git", "fetch", "approved"], cwd=working_dir, check=True) - # Checkout the 
branch with the requested name (creating it at the - # current commit of the default branch if it doesn't exist) + # Checkout the target branch if it exists git_checkout_result = subprocess.run( - ["git", "checkout", target_branch_name], cwd=working_dir + ["git", "checkout", target_branch_name], cwd=working_dir, check=False ) if git_checkout_result.returncode == 0: subprocess.run(["git", "pull", "approved"], cwd=working_dir, check=True) # now checkout the branch holding the snapshot - subprocess.run( - ["git", "checkout", "-b", branch_name], cwd=working_dir, check=True - ) + subprocess.run(["git", "checkout", "-b", branch_name], cwd=working_dir, check=True) # Remove the contents of the cloned repo (everything except .git) for item in os.listdir(working_dir): @@ -409,12 +354,8 @@ def clone_commit_and_push( # Commit everything to this branch, also putting commit hash into message subprocess.run(["git", "add", "."], cwd=working_dir, check=True) - commit_msg = "Import snapshot of {} at commit {}".format( - remote_url, commit_hash - ) - subprocess.run( - ["git", "commit", "-m", commit_msg], cwd=working_dir, check=True - ) + commit_msg = "Import snapshot of {} at commit {}".format(remote_url, commit_hash) + subprocess.run(["git", "commit", "-m", commit_msg], cwd=working_dir, check=True) # Push back to gitlab review (unapproved) subprocess.run( ["git", "push", "-f", "--set-upstream", "origin", branch_name], @@ -422,55 +363,47 @@ def clone_commit_and_push( check=True, ) - logger.info("Pushed to {} branch {}".format(remote_url, branch_name)) + logger.info("Pushed to %s, branch %s", remote_url, branch_name) -def fork_project(gitlab_config, repo_name, project_id, namespace_id): - already_exists = check_if_project_exists( - gitlab_config, - repo_name, - namespace_id, - gitlab_config["api_url"], - gitlab_config["api_token"], - ) - if not already_exists: +def fork_project( + gitlab_config, + fork_namespace, + repo_name, + orig_project_id, + fork_namespace_id, +): + 
maybe_fork_project = gl.get_project(gitlab_config, fork_namespace, repo_name) + if not maybe_fork_project: fork_url = "{}/projects/{}/fork".format( - gitlab_config["api_url"], project_id + gitlab_config["api_url"], orig_project_id ) response = requests.post( fork_url, headers=gitlab_config["headers"], - data={"namespace_id": namespace_id}, + data={"namespace_id": fork_namespace_id}, ) if response.status_code != 201: - raise http_error("Forking project {project_id}", response) + raise gl.http_error(f"Forking project {orig_project_id}", response) - new_project_info = response.json() + fork_project_info = response.json() else: # project already exists - ensure it is a fork of # 'approved/' - new_project_info = get_or_create_project( - gitlab_config, - repo_name, - namespace_id, - gitlab_config["api_url"], - gitlab_config["api_token"], - ) - new_project_id = new_project_info["id"] + fork_project_info = maybe_fork_project + fork_project_id = fork_project_info["id"] fork_url = "{}/projects/{}/fork/{}".format( - gitlab_config["api_url"], new_project_id, project_id + gitlab_config["api_url"], fork_project_id, orig_project_id ) - response = requests.post(fork_url, headers=gitlab_config["headers"],) + response = requests.post(fork_url, headers=gitlab_config["headers"]) # status code 201 if fork relationship created, or 409 if already there if (response.status_code != 201) and (response.status_code != 409): - raise http_error("Creating fork relationship", response) + raise gl.http_error("Creating fork relationship", response) - return new_project_info + return fork_project_info -def unzipped_snapshot_to_merge_request( - gitlab_config, snapshot_details, namespace_ids -): +def unzipped_snapshot_to_merge_request(gitlab_config, snapshot_details, namespace_ids): # unpack tuple ( repo_name, @@ -479,18 +412,17 @@ def unzipped_snapshot_to_merge_request( source_git_url, snapshot_path, ) = snapshot_details - logger.info( - "Unpacked {} {} {}".format(repo_name, commit_hash, 
target_branch_name) - ) + + logger.info("Unpacked %s %s %s", repo_name, commit_hash, target_branch_name) unzipped_location = os.path.join(snapshot_path, "repo") target_project_info = get_or_create_project( - gitlab_config, repo_name, namespace_ids["approved"], + gitlab_config, "approved", repo_name, namespace_ids, ) target_project_id = target_project_info["id"] target_project_url = target_project_info["ssh_url_to_repo"] - logger.info("Created project {}/{} ".format(group_names[1], repo_name)) + logger.info("Created project approved/%s ", repo_name) # Branch to create on the source (unapproved) repository of the # matches that of the target @@ -499,19 +431,19 @@ def unzipped_snapshot_to_merge_request( # Fork this project to "unapproved" group src_project_info = fork_project( gitlab_config, - repo_name, - target_project_id, - namespace_ids["unapproved"], + fork_namespace="unapproved", + repo_name=repo_name, + orig_project_id=target_project_id, + fork_namespace_id=namespace_ids["unapproved"], ) src_project_id = src_project_info["id"] remote_url = src_project_info["ssh_url_to_repo"] - logger.info("Fork of project at {}/{}".format(group_names[0], repo_name)) + logger.info("Fork of project at unapproved/%s", repo_name) # Do the command-line git stuff to push to unapproved project clone_commit_and_push( - repo_name, unzipped_location, - tmp_repo_dir, + snapshot_path, src_branch_name, target_branch_name, remote_url, @@ -524,7 +456,7 @@ def unzipped_snapshot_to_merge_request( gitlab_config, target_branch_name, target_project_id, - "{} / {}".format(group_names[1], repo_name), ## for logging + "unapproved / {}".format(repo_name), ## for logging ) # Create the merge request @@ -544,21 +476,18 @@ def unzipped_snapshot_to_merge_request( def main(): zipfile_dir = "/tmp/zipfiles" # get the gitlab config - gitlab_config = get_api_config("GITLAB-REVIEW") + gitlab_config = gl.get_api_config("GITLAB-REVIEW") # unzip the zipfiles, and retrieve a list of tuples describing # (repo_name, 
commit_hash, desired_branch, source_git_url, snapshot_path) unzipped_snapshots = unzip_zipfiles(zipfile_dir) - gitlab_ingress_groups = ["unapproved", "approved"] - namespace_ids = get_group_namespace_ids( - gitlab_config, gitlab_ingress_groups - ) + group_ids = gl.get_group_ids(gitlab_config) + + # TODO check "approved" and "unapproved" key in namespace_ids for snapshot_details in unzipped_snapshots: - unzipped_snapshot_to_merge_request( - gitlab_config, snapshot_details, namespace_ids - ) + unzipped_snapshot_to_merge_request(gitlab_config, snapshot_details, group_ids) if __name__ == "__main__": From 4514a98e8a8b65f677e3044ce7dc4cc2935f0a2f Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 26 Jun 2020 14:11:43 +0100 Subject: [PATCH 107/155] Remove stray pylint headers --- .../cloud_init/scripts/zipfile_to_gitlab_project.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index a1cfd54d6b..b046de1144 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -1,10 +1,5 @@ #!/usr/bin/env python3 -# pylint: disable=C0330 -# pylint: disable=C0114 -# pylint: disable=C0116 -# pylint: disable=C0103 - import os import subprocess from zipfile import ZipFile, BadZipFile From 274c93bb1e8e83a2a2b3eced42261a9a209b8aa1 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 26 Jun 2020 14:24:37 +0100 Subject: [PATCH 108/155] Handle 'successful' 500 and 201 returns in a similar way in merge request creation --- .../scripts/zipfile_to_gitlab_project.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py 
b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index b046de1144..848c69ac3d 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -285,21 +285,21 @@ def create_merge_request_if_not_exists( target_branch, ) + if mr and response.status_code == 201: + logger.info("Created merge request %s -> %s", source_branch, target_branch) + return mr + if mr and response.status_code == 500: logger.error( - "Response 500 (%s) returned when creating merge request for %s, although " - "the merge request was created. This may not signal a genuine problem.", + "Response 500 (%s) returned when creating merge request for %s -> %s, " + "although the merge request was determined to have been created successfully " + "so this may not signal a genuine problem.", response.reason, repo_name ) return mr - if response.status_code != 201: - raise gl.http_error(f"Creating merge request for {repo_name}", response) - - logger.info("Created merge request %s -> %s", source_branch, target_branch) - mr_info = response.json() - return mr_info + raise gl.http_error(f"Creating merge request for {repo_name}", response) def clone_commit_and_push( From 44160038f2e926f1f0234257f49b250b3b3ac2fd Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 26 Jun 2020 15:00:20 +0100 Subject: [PATCH 109/155] Adjust crontab entry for gitlab script --- .../cloud_init/cloud-init-gitlab-review.template.yaml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml index bc949fb7d5..0188d173fa 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml +++ 
b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml @@ -210,8 +210,10 @@ runcmd: # -------------------------------- # ADD CRONTAB ENTRIES FOR GITLAB SCRIPTS # -------------------------------- - - echo "*** Adding review_steps.sh to crontab ***" - - echo "*/10 * * * * gitlabdaemon 'flock -n /tmp/review_steps.lock /home/gitlabdaemon/review_steps.sh'" >> /etc/crontab + # run zipfile_to_gitlab_project.py and check_merge_requests.py sequentially, and with a lock, to prevent + # them from interfering. Return an error code + - echo "*** Adding zipfile_to_gitlab_project.py and check_merge_requests.py to crontab ***" + - echo "*/10 * * * * gitlabdaemon \"flock -n /tmp/review_steps.lock bash -c 'EXIT_STATUS=0; zipfile_to_gitlab_project.py || EXIT_STATUS=$?; check_merge_requests.py || EXIT_STATUS=$?; exit $EXIT_STATUS' \" " >> /etc/crontab # -------------------------------- # GIVE gitlabdaemon OWNERSHIP OF THEIR HOME DIRECTORY # -------------------------------- From c147ba749282963e39b0e2e5853099e6f2837e27 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 26 Jun 2020 15:28:42 +0100 Subject: [PATCH 110/155] Lint --- .../cloud_init/scripts/check_merge_requests.py | 12 +++++------- .../cloud_init/scripts/gitlab_config.py | 2 ++ .../scripts/zipfile_to_gitlab_project.py | 18 +++++++++--------- 3 files changed, 16 insertions(+), 16 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py index 9b4df3d262..4a844fdf1a 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py +++ b/deployment/secure_research_environment/cloud_init/scripts/check_merge_requests.py @@ -149,8 +149,7 @@ def unresolved_mr_discussions(config, mr): project_id = mr["project_id"] mr_iid = mr["iid"] endpoint = ( - config["api_url"] - + f"/projects/{project_id}/merge_requests/{mr_iid}/discussions" 
+ config["api_url"] + f"/projects/{project_id}/merge_requests/{mr_iid}/discussions" ) response = requests.get(endpoint, headers=config["headers"]) if response.status_code != 200: @@ -185,8 +184,7 @@ def accept_merge_request(config, mr): project_id = mr["project_id"] mr_iid = mr["iid"] endpoint = ( - config["api_url"] - + f"/projects/{project_id}/merge_requests/{mr_iid}/merge" + config["api_url"] + f"/projects/{project_id}/merge_requests/{mr_iid}/merge" ) response = requests.put(endpoint, headers=config["headers"]) @@ -226,7 +224,7 @@ def handle_all_merge_requests(): logger.info("Getting open merge requests for approval") - ## TODO throw in get_merge_requests_for_approval + # TODO throw in get_merge_requests_for_approval merge_requests = get_merge_requests_for_approval(config_gitlabreview) logger.info("Found %s open merge requests", len(merge_requests)) @@ -234,7 +232,7 @@ def handle_all_merge_requests(): mr_errors_encountered = 0 for i, mr in enumerate(merge_requests): logger.info("-" * 20) - logger.info("Merge request %s of %s", i+1, len(merge_requests)) + logger.info("Merge request %s of %s", i + 1, len(merge_requests)) logger.info("Checking merge request %s", mr) if mr["merge_status"] != "can_be_merged": @@ -280,7 +278,7 @@ def handle_all_merge_requests(): "Merge request has not been approved: skipping. Reason: %s", merge_checks ) - ## Errors from GitLab requests and subprocess + # Errors from GitLab requests and subprocess except (requests.HTTPError, subprocess.CalledProcessError): logger.exception( "Handling merge request failed for %s. 
Attempting to continue " diff --git a/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py index f384e85247..4191c7f10e 100755 --- a/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py +++ b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py @@ -5,12 +5,14 @@ from urllib.parse import quote as url_quote import requests + def http_error(msg, response): return requests.HTTPError( msg + ": Unexpected response: " + response.reason + " (" + response.status_code + "), content: " + response.text ) + def get_api_config(server, file=None): """Construct API URL, headers and other settings. diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 848c69ac3d..5b0a7917c4 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -34,7 +34,7 @@ def unzip_zipfiles(zipfile_dir): zipfile_subdir = os.path.join(zipfile_dir, d) zipfile_path = os.path.join(zipfile_subdir, "repo.zip") unpacked_location = os.path.join(zipfile_subdir, "repo") - ## ensure "repo" does not already exist (from a previous failed attempt) + # ensure "repo" does not already exist (from a previous failed attempt) subprocess.run("rm", "-rf", unpacked_location, check=True) try: with ZipFile(zipfile_path, "r") as zip_obj: @@ -142,7 +142,7 @@ def create_project(gitlab_config, repo_name, namespace_id): "branch": "_gitlab_ingress_review", "commit_message": "Initial commit", "actions": [ - {"action": "create", "file_path": "README.md", "content": README,} + {"action": "create", "file_path": "README.md", "content": README} ], }, ) @@ -185,7 +185,7 @@ def create_branch_if_not_exists( logger.info("Branch %s 
exists for project %s", branch_name, project_id) return branch - ## Branch does not exist + # Branch does not exist branch_url = "{}/projects/{}/repository/branches".format( gitlab_config["api_url"], project_id ) @@ -215,7 +215,7 @@ def check_if_merge_request_exists( merge_requests = response.json() - ## return the (known unique) merge request if found, otherwise False + # return the (known unique) merge request if found, otherwise False for mr in merge_requests: if ( mr["source_branch"] == source_branch @@ -234,7 +234,7 @@ def create_merge_request_if_not_exists( target_project_id, target_branch, ): - ## Check whether requested MR exists, return it if it does + # Check whether requested MR exists, return it if it does mr = check_if_merge_request_exists( gitlab_config, @@ -247,7 +247,7 @@ def create_merge_request_if_not_exists( logger.info("Merge Request for %s already exists", repo_name) return mr - ## Ensure fork relationship is established + # Ensure fork relationship is established fork_url = "{}/projects/{}/fork/{}".format( gitlab_config["api_url"], source_project_id, target_project_id @@ -275,8 +275,8 @@ def create_merge_request_if_not_exists( }, ) - ## Check explicitly whether merge request exists, since sometimes - ## a 500 status is spuriously returned + # Check explicitly whether merge request exists, since sometimes + # a 500 status is spuriously returned mr = check_if_merge_request_exists( gitlab_config, source_project_id, @@ -451,7 +451,7 @@ def unzipped_snapshot_to_merge_request(gitlab_config, snapshot_details, namespac gitlab_config, target_branch_name, target_project_id, - "unapproved / {}".format(repo_name), ## for logging + "unapproved / {}".format(repo_name), # for logging ) # Create the merge request From be37161b91282c36688999d5dd87a84caebcd048 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 26 Jun 2020 16:17:33 +0100 Subject: [PATCH 111/155] Add some docstrings back; remove the 'docstrings only' file --- 
.../scripts/zipfile_to_gitlab_project.py | 106 +++++++ .../scripts/zipfile_to_gitlab_project_doc.py | 264 ------------------ 2 files changed, 106 insertions(+), 264 deletions(-) delete mode 100644 deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project_doc.py diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 5b0a7917c4..b6c13d89ab 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -60,6 +60,21 @@ def unzip_zipfiles(zipfile_dir): def create_project(gitlab_config, repo_name, namespace_id): + """ + Create empty project on gitlab, and return the project info as returned by + GitLab on creation + + Parameters + ========== + gitlab_config: dict, gitlab configuration information (same as used elsewhere) + repo_name: str, name of the repository/project + namespace_id: int, ID of the group ("unapproved" or "approved") + + Returns + ======= + gitlab_project_info: dict, containing among other things, the name and + the remote URL for the project. + """ projects_url = "{}/projects/".format(gitlab_config["api_url"]) response = requests.post( projects_url, @@ -161,6 +176,19 @@ def get_or_create_project(config, namespace_ids, namespace, repo_name): def check_if_branch_exists(gitlab_config, branch_name, project_id): + """ + See if a branch with name branch_name already exists on this Project + + Parameters + ========== + gitlab_config: dict, gitlab configuration (as used elsewhere) + branch_name: str, name of branch to look for + project_id: int, id of the project, obtained from projects API endpoint + + Returns + ======= + branch_exists: bool, True if branch exists, False if not. 
+ """ branches_url = "{}/projects/{}/repository/branches".format( gitlab_config["api_url"], project_id ) @@ -180,6 +208,26 @@ def check_if_branch_exists(gitlab_config, branch_name, project_id): def create_branch_if_not_exists( gitlab_config, branch_name, project_id, reference_branch="_gitlab_ingress_review", ): + """ + Create a new branch on an existing project if it does not exist already. + By default, use '_gitlab_ingress_review' as the branch name (which is unlikely + to exist in the source repo) as the reference branch from which to create the + new one. + + Parameters + ========== + gitlab_config: dict, gitlab configuration information (as used elsewhere) + branch_name: str, the desired name of the new branch + project_id: int, the ID of the project, which is the "id" value in + the dictionary of project information returned when + creating a new project or listing existing ones. + reference_branch: str, (default "_gitlab_ingress_review"), create the new + branch based on this branch + + Returns + ======= + dict, info about the branch (either existing or newly-created) from API endpoint + """ branch = check_if_branch_exists(gitlab_config, branch_name, project_id) if branch: logger.info("Branch %s exists for project %s", branch_name, project_id) @@ -206,6 +254,14 @@ def create_branch_if_not_exists( def check_if_merge_request_exists( gitlab_config, source_project_id, source_branch, target_project_id, target_branch, ): + """See if there is an existing merge request between the source and target + project/branch combinations. + + Returns + ======= + Either: the merge request (if it exists), or False + """ + mr_url = "{}/projects/{}/merge_requests".format( gitlab_config["api_url"], target_project_id ) @@ -234,6 +290,11 @@ def create_merge_request_if_not_exists( target_project_id, target_branch, ): + """ + Create a new merge request if one does not exist already. Return + the existing or newly created merge request information returned + by the API. 
+ """ # Check whether requested MR exists, return it if it does mr = check_if_merge_request_exists( @@ -311,6 +372,23 @@ def clone_commit_and_push( target_project_url, commit_hash, ): + """ + Run shell commands to convert the unzipped directory containing the + repository contents into a git repo, then commit it on the branch + with the requested name. + + Parameters + ========== + path_to_unzipped_repo: str, the full directory path to the unzipped repo + tmp_repo_dir: str, path to a temporary dir where we will clone the project + branch_name: str, the name of the branch holding the snapshot + target_branch_name: str, the name of the branch to push to + remote_url: str, the URL for this project on gitlab-review to be added + as a remote. + target_project_url: str, the url of the original upstream project + commit_hash: str, the commit hash of the snapshot of the upstream project + """ + # Clone the repo subprocess.run(["git", "clone", remote_url, "cloned_repo"], cwd=tmp_repo_dir, check=True) working_dir = os.path.join(tmp_repo_dir, "cloned_repo") @@ -368,6 +446,23 @@ def fork_project( orig_project_id, fork_namespace_id, ): + """ + Fork the project with id 'orig_project_id' to `fork_namespace`/`repo_name` + after first checking whether the latter exists. 
+ + Parameters + ========== + gitlab_config: dict, gitlab configuration information + fork_namespace: str, name of the namespace to create the fork + repo_name: str, name of the repo/project + orig_project_id: int, project id of the original (forked-from) project + fork_namespace_id: int, id of the namespace to fork into + + Returns + ======= + fork_project_info: dict, info of the newly created project from the API + """ + maybe_fork_project = gl.get_project(gitlab_config, fork_namespace, repo_name) if not maybe_fork_project: fork_url = "{}/projects/{}/fork".format( @@ -399,6 +494,17 @@ def fork_project( def unzipped_snapshot_to_merge_request(gitlab_config, snapshot_details, namespace_ids): + """ + Go through all the steps for a single repo/project. + + Parameters + ========== + gitlab_config: dict, contains api url and token + snapshot_details: tuple of strings, (repo_name, hash, desired_branch, location) + namespace_ids; dict, keys are the group names (e.g. "unapproved", "approved", values + are the ids of the corresponding namespaces in Gitlab + """ + # unpack tuple ( repo_name, diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project_doc.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project_doc.py deleted file mode 100644 index 9028e9e4f6..0000000000 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project_doc.py +++ /dev/null @@ -1,264 +0,0 @@ -from zipfile_to_gitlab_project import * - -__doc__ = """ -Start from zipfile of a particular commit - should have filename -of the form __.zip - -We want to turn this into a merge request on a Gitlab project. - -1) get useful gitlab stuff (url, api key, namespace_ids for our groups) -2) unzip zipfiles in specified directory -3) loop over unzipped repos. 
For each one: - a) see if "approved" project with same name exists, if not, create it - b) check if merge request to "approved/" with source and target branches - "commit-" and "" already exists. - If so, skip to the next unzipped repo. - b) see if "unapproved" project with same name exists, if not, fork "approved" one - c) clone "unapproved" project, and create branch called "commit-" - d) copy in contents of unzipped repo. - e) git add, commit and push to "unapproved" project - f) create branch "" on "approved" project - g) create merge request from unapproved/repo_name/commit_hash to - approved/repo_name/desired_branch_name -4) clean up - remove zipfiles and unpacked repos. -""" - -unzip_zipfiles.__doc__ = """ -Parameters -========== -zipfile_dir: str, path to directory containing zipfiles -tmp_unzipped_dir: str, path to directory where zipfiles will be unzipped - -Returns -======= -output_list: list of tuples -[(repo_name, commit_hash, desired_branch, unzipped-path),...] - -Note that the convention for the zipfile filenames is -__.zip -""" - -get_group_namespace_ids.__doc__ = """ -Find the namespace_id corresponding to the groups we're interested in, -e.g. 'approved' and 'unapproved'. - -Parameters -========== -gitlab_url: str, base URL for the API -gitlab_token: str, API token for Gitlab -groups: list of string, the group names to look for. - -Returns -======= -namespace_id_dict: dict, format {: } -""" - -get_gilab_project_list.__doc__ = """ -Get the list of Projects. - -Parameters -========== -gitlab_url: str, base URL for the API -gitlab_token: str, API token. - -Returns -======= -gitlab_projects: list of dictionaries. -""" - -check_if_project_exists.__doc__ = """ -Get a list of projects from the API - check if namespace_id (i.e. group) -and name match. 
- -Parameters -========== -repo_name: str, name of our repository/project -namespace_id: int, id of our group ("unapproved" or "approved") -gitlab_url: str, base URL of Gitlab API -gitlab_token: str, API key for Gitlab API. - -Returns -======= -bool, True if project exists, False otherwise. -""" - -get_or_create_project.__doc__ = """ -Check if project exists, and if so get its ID. Otherwise, create -it and return the ID. - -Parameters -========== -repo_name: str, name of the repository/project we're looking for. -namespace_id: int, the ID of the group ("unapproved" or "approved") -gitlab_url: str, base URL of the API -gitlab_token: str, API key - -Returns -======= -gitlab_project_url: str, the URL to be set as the "remote". -""" - -======= -create_project.__doc__ = """ -Create empty project on gitlab, and return the corresponding remote URL. - -Parameters -========== -repo_name: str, name of the repository/project -namespace_id: int, ID of the group ("unapproved" or "approved") -gitlab_url: str, base URL of the API -gitlab_token: str, API token. - -Returns -======= -gitlab_project_info: dict, containing among other things, the name and -the remote URL for the project. -""" - -check_if_branch_exists.__doc__ = """ -See if a branch with name branch_name already exists on this Project - -Parameters -========== -branch_name: str, name of branch to look for -project_id: int, id of the project, obtained from projects API endpoint -gitlab_url: base URL of the Gitlab API -gitlab_token: API token for the Gitlab API - -Returns -======= -branch_exists: bool, True if branch exists, False if not. -""" - -create_branch.__doc__ = """ -Create a new branch on an existing project. By default, use -'_gitlab_ingress_review' (which is unlikely to exist in the source -repo) as the reference branch from which to create the new one. 
- -Parameters -========== -branch_name: str, the desired name of the new branch -project_id: int, the ID of the project, which is the "id" value in -the dictionary of project information returned when -creating a new project or listing existing ones. -gitlab_url: str, the base URL for the Gitlab API -gitlab_token: str, the Gitlab API token -reference_branch: str, (default "_gitlab_ingress_review"), create the new -branch based on this branch - -Returns -======= -branch_info: dict, info about the branch from API endpoint -""" - -create_branch_if_not_exists.__doc__ = """ -Idempotent form of `create_branch`. - -Additional argument, (between gitlab_token and reference_branch) -log_project_info: str, prefix to use for logging messages -(most likely the repo name) -""" - -check_if_merge_request_exists.__doc__ = """ -See if there is an existing merge request between the source and target -project/branch combinations. - -Parameters -========== -source_branch: str, name of the branch on source project, will typically -be the commit_hash from the original repo. -target_project_id: int, project_id for the "approved" group's project. -target_branch: str, name of branch on target project, will typically -be the desired branch name. -gitlab_url: str, base URL for the Gitlab API -gitlab_token: str, API token for the Gitlab API. - -Returns -======= -bool, True if merge request already exists, False otherwise -""" - -create_merge_request.__doc__ = """ -Create a new MR, e.g. from the branch in the "unapproved" -group's project, to the branch in the "approved" -group's project. - -Parameters -========== -repo_name: str, name of the repository -source_project_id: int, project_id for the unapproved project, obtainable -as the "ID" field of the json returned from the -projects API endpoint. -source_branch: str, name of the branch on source project, will typically -be the 'branch-'. -target_project_id: int, project_id for the "approved" group's project. 
-target_branch: str, name of branch on target project, will typically -be the desired branch name. -gitlab_url: str, base URL for the Gitlab API -gitlab_token: str, API token for the Gitlab API. - -Returns -======= -mr_info: dict, the response from the API upon creating the Merge Request -""" - -create_merge_request_if_not_exists.__doc__ = """ -Idempotent form of `create_merge_request`. -""" - -clone_commit_and_push.__doc__ = """ -Run shell commands to convert the unzipped directory containing the -repository contents into a git repo, then commit it on the branch -with the requested name. - -Parameters -========== -repo_name: str, name of the repository/project -path_to_unzipped_repo: str, the full directory path to the unzipped repo -tmp_repo_dir: str, path to a temporary dir where we will clone the project -branch_name: str, the name of the branch to push to -remote_url: str, the URL for this project on gitlab-review to be added -as a "remote". -""" - -fork_project.__doc__ = """ -Fork the project 'approved/' to 'unapproved/' -after first checking whether the latter exists. - -Parameters -========== -repo_name: str, name of the repo/project -project_id: int, project id of the 'approved/' project -namespace_id: int, id of the 'unapproved' namespace -gitlab_url: str, str, the base URL of Gitlab API -gitlab_token: str, API token for Gitlab API - -Returns -======= -new_project_id: int, the id of the newly created 'unapproved/' project -""" - -unzipped_snapshot_to_merge_request = """ -Go through all the steps for a single repo/project. - -Parameters -========== -repo_details: tuple of strings, (repo_name, hash, desired_branch, location) -tmp_repo_dir: str, directory where we will clone the repo, then copy the contents in -gitlab_config: dict, contains api url and token -namespace_ids; dict, keys are the group names (e.g. 
"unapproved", "approved", values -are the ids of the corresponding namespaces in Gitlab -group_names: list of strings, typically ["unapproved", "approved"] -""" - -cleanup.__doc__ = """ -Remove directories and files after everything has been uploaded to gitlab - -Parameters -========== -zipfile_dir: str, directory containing the original zipfiles. Will not remove this -directory, but we will delete all the zipfiles in it. -tmp_unzipped_dir: str, directory where the unpacked zipfile contents are put. Remove. -tmp_repo_dir: str, directory where projects are cloned from Gitlab, then contents from -tmp_unzipped_dir are copied in. Remove. -""" From 2dab934e6fe574bb33ad12b4a429fcef716acf2f Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 26 Jun 2020 16:25:03 +0100 Subject: [PATCH 112/155] Revert removal of gitlab_config.py command-line functionality --- .../cloud_init/scripts/gitlab_config.py | 59 ++++++++++++++++--- 1 file changed, 50 insertions(+), 9 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py index 4191c7f10e..2c108b1e7d 100755 --- a/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py +++ b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 import json +import argparse from pathlib import Path from urllib.parse import quote as url_quote import requests @@ -12,10 +13,43 @@ def http_error(msg, response): + response.status_code + "), content: " + response.text ) +def get_gitlab_config(file=None, server=None, value=None): + """Get GitLab server details and user secrets. + Parameters + ---------- + file : str, optional + Path to configuration file, by default None which resolves to + .secrets/gitlab-config.json in the user's home directory. 
+ server : str, optional + Name of the server to get details for (must match format in config file), + by default None which returns alls ervers. + value : str, optional + Name of the configuration value to return, by default None which returns + all parameters. + Returns + ------- + dict or str + If server and value are not None, str of the requested value. If only + server or neither specified, dict of all the relevant values. + """ + if file is None: + file = f"{Path.home()}/.secrets/gitlab-config.json" + + with open(file, "r") as f: + config = json.load(f) + + if server is None and value is None: + return config + elif value is None: + return config[server] + elif server is None: + raise ValueError("If value is given, server must also be given.") + else: + return config[server][value] + def get_api_config(server, file=None): """Construct API URL, headers and other settings. - Parameters ---------- server : str @@ -23,20 +57,15 @@ def get_api_config(server, file=None): file : str Path to configuration file, by default None which resolves to .secrets/gitlab-config.json in the user's home directory. - Returns ------- dict Secrets api_url, api_token, ip and headers. 
""" - if file is None: - file = f"{Path.home()}/.secrets/gitlab-config.json" + config = get_gitlab_config(file=file, server=server, value=None) - with open(file, "r") as f: - config = json.load(f) - - ip = config[server]["ip_address"] - token = config[server]["api_token"] + ip = config["ip_address"] + token = config["api_token"] api_url = f"http://{ip}/api/v4" headers = {"Authorization": "Bearer " + token} @@ -99,3 +128,15 @@ def get_group_ids(gitlab_config): gitlab_groups = response.json() return {group["name"]: group["id"] for group in gitlab_groups} + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Get GitLab configuration values.") + parser.add_argument("--file", help="Location of config file.", default=None) + parser.add_argument( + "--server", help="Name of server to get config for.", default=None + ) + parser.add_argument("--value", help="Configuration value to get.", default=None) + args = parser.parse_args() + + print(get_gitlab_config(file=args.file, server=args.server, value=args.value)) From c82dfabd1cf7d65cc7b3207f2e084c7df9d5c761 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 26 Jun 2020 16:27:34 +0100 Subject: [PATCH 113/155] Lint --- .../cloud_init/scripts/gitlab_config.py | 1 + 1 file changed, 1 insertion(+) diff --git a/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py index 2c108b1e7d..dcfc1c58d0 100755 --- a/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py +++ b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py @@ -13,6 +13,7 @@ def http_error(msg, response): + response.status_code + "), content: " + response.text ) + def get_gitlab_config(file=None, server=None, value=None): """Get GitLab server details and user secrets. 
Parameters From 90064acca3aaad9265c8ad7f378ad7e2e09b15d3 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 26 Jun 2020 16:27:49 +0100 Subject: [PATCH 114/155] Lint --- .../cloud_init/scripts/zipfile_to_gitlab_project.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index b6c13d89ab..840b43c20e 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -63,13 +63,13 @@ def create_project(gitlab_config, repo_name, namespace_id): """ Create empty project on gitlab, and return the project info as returned by GitLab on creation - + Parameters ========== gitlab_config: dict, gitlab configuration information (same as used elsewhere) repo_name: str, name of the repository/project namespace_id: int, ID of the group ("unapproved" or "approved") - + Returns ======= gitlab_project_info: dict, containing among other things, the name and @@ -376,7 +376,7 @@ def clone_commit_and_push( Run shell commands to convert the unzipped directory containing the repository contents into a git repo, then commit it on the branch with the requested name. 
- + Parameters ========== path_to_unzipped_repo: str, the full directory path to the unzipped repo From c77453dab977ffec3bd967685c8da18c97f82295 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 26 Jun 2020 16:32:41 +0100 Subject: [PATCH 115/155] Flake8: ignore W503 (line break before binary operator) --- .flake8 | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.flake8 b/.flake8 index 0df6198052..b63726b58b 100644 --- a/.flake8 +++ b/.flake8 @@ -1,3 +1,5 @@ [flake8] # Ignore line length ignore = E501 +# Ignore line break before binary operator +ignore = W503 \ No newline at end of file From 5c858fb0a055e44da344c29489d5fbe489b6e9b4 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 26 Jun 2020 16:34:23 +0100 Subject: [PATCH 116/155] Fix to flake8 config file --- .flake8 | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/.flake8 b/.flake8 index b63726b58b..747eb67676 100644 --- a/.flake8 +++ b/.flake8 @@ -1,5 +1,4 @@ [flake8] -# Ignore line length -ignore = E501 -# Ignore line break before binary operator -ignore = W503 \ No newline at end of file +# Ignore line length (E501) +# and line break before binary operator (W503) +ignore = E501,W503 \ No newline at end of file From 435b33a9ff50ff5b01f3355388e2f7d446e1a22b Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 26 Jun 2020 17:17:36 +0100 Subject: [PATCH 117/155] Fix NSG rules --- .../setup/Setup_SRE_WebApp_Servers.ps1 | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index e0da99fe48..f62df23d1f 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -38,12 +38,14 @@ $gitlabAPIToken = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -S # Set up NSGs for the 
webapps # --------------------------- $nsgWebapps = Deploy-NetworkSecurityGroup -Name $config.sre.webapps.nsg -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` + +# TODO fix this when this is no longer hard-coded +Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps ` -Name "InboundAllowVpnSsh" ` -Description "Inbound allow SSH connections from VPN subnet" ` -Priority 1000 ` -Direction Inbound -Access Allow -Protocol TCP ` - -SourceAddressPrefix "172.16.201.0/24" -SourcePortRange * ` # TODO fix this when this is no longer hard-coded + -SourceAddressPrefix "172.16.201.0/24" -SourcePortRange * ` -DestinationAddressPrefix VirtualNetwork -DestinationPortRange 22 Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps ` -Name "InboundAllowHttpSessionHost" ` @@ -59,7 +61,7 @@ Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps ` -Direction Inbound -Access Allow -Protocol TCP ` -SourceAddressPrefix $config.sre.network.subnets.data.cidr -SourcePortRange * ` -DestinationAddressPrefix VirtualNetwork -DestinationPortRange 80,443 -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` +Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps ` -Name "InboundDenyOtherVNet" ` -Description "Inbound deny other VNet connections" ` -Priority 4000 ` @@ -73,20 +75,22 @@ Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps ` -Direction Outbound -Access Deny -Protocol * ` -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` -DestinationAddressPrefix Internet -DestinationPortRange * -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` +Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps ` -Name "OutboundDenyVNet" ` -Description "Outbound deny VNet connections" ` -Priority 3000 ` - -Direction Inbound -Access Deny -Protocol * ` + -Direction Outbound -Access Deny -Protocol * ` -SourceAddressPrefix VirtualNetwork 
-SourcePortRange * ` -DestinationAddressPrefix VirtualNetwork -DestinationPortRange * + +# TODO fix hard-coded cidr in InboundAllowVpnSsh $nsgAirlock = Deploy-NetworkSecurityGroup -Name $config.sre.network.nsg.airlock.name -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` -Name "InboundAllowVpnSsh" ` -Description "Inbound allow SSH connections from VPN subnet" ` -Priority 1000 ` -Direction Inbound -Access Allow -Protocol TCP ` - -SourceAddressPrefix "172.16.201.0/24" -SourcePortRange * ` # TODO fix this when this is no longer hard-coded + -SourceAddressPrefix "172.16.201.0/24" -SourcePortRange * ` -DestinationAddressPrefix VirtualNetwork -DestinationPortRange 22 Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` -Name "InboundAllowReviewServer" ` @@ -113,7 +117,7 @@ Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` -Name "OutboundDenyVNet" ` -Description "Outbound deny other VNet connections" ` -Priority 4000 ` - -Direction Inbound -Access Deny -Protocol * ` + -Direction Outbound -Access Deny -Protocol * ` -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` -DestinationAddressPrefix VirtualNetwork -DestinationPortRange * From b96fc4ecd1e10d9ba2c9ddb8fe3b89b35835d780 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 29 Jun 2020 11:43:28 +0100 Subject: [PATCH 118/155] set subnet to airlock (setup webapp servers) --- .../setup/Setup_SRE_WebApp_Servers.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index f62df23d1f..7b7d0a0d08 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -125,7 +125,7 @@ Add-NetworkSecurityGroupRule -NetworkSecurityGroup 
$nsgAirlock ` # --------------------------------- $vnet = Deploy-VirtualNetwork -Name $config.sre.network.vnet.name -ResourceGroupName $config.sre.network.vnet.rg -AddressPrefix $config.sre.network.vnet.cidr -Location $config.sre.location $null = Deploy-Subnet -Name $config.sre.network.subnets.data.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.data.cidr -$null = Deploy-Subnet -Name $config.sre.network.subnets.airlock.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.airlock.cidr +$subnet = Deploy-Subnet -Name $config.sre.network.subnets.airlock.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.airlock.cidr # Expand GitLab cloudinit From 9fa64be71d165021ebb2fae1bc2b7dee3d100be2 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 29 Jun 2020 11:44:05 +0100 Subject: [PATCH 119/155] Fix to crontab entry (gitlab review scripts) --- .../cloud_init/cloud-init-gitlab-review.template.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml index 0188d173fa..17b84a611d 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml @@ -213,7 +213,7 @@ runcmd: # run zipfile_to_gitlab_project.py and check_merge_requests.py sequentially, and with a lock, to prevent # them from interfering. 
Return an error code - echo "*** Adding zipfile_to_gitlab_project.py and check_merge_requests.py to crontab ***" - - echo "*/10 * * * * gitlabdaemon \"flock -n /tmp/review_steps.lock bash -c 'EXIT_STATUS=0; zipfile_to_gitlab_project.py || EXIT_STATUS=$?; check_merge_requests.py || EXIT_STATUS=$?; exit $EXIT_STATUS' \" " >> /etc/crontab + - echo "*/10 * * * * gitlabdaemon flock -n /tmp/review_steps.lock bash -c 'EXIT_STATUS=0; zipfile_to_gitlab_project.py || EXIT_STATUS=\$?; check_merge_requests.py || EXIT_STATUS=\$?; exit \$EXIT_STATUS'" >> /etc/crontab # -------------------------------- # GIVE gitlabdaemon OWNERSHIP OF THEIR HOME DIRECTORY # -------------------------------- From 4c3dbca15dbe66a7df46c8b2083091a1dab6a3d4 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 29 Jun 2020 19:53:28 +0100 Subject: [PATCH 120/155] Store both subnets (WebApp servers) --- .../setup/Setup_SRE_WebApp_Servers.ps1 | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 7b7d0a0d08..7fde931ec4 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -124,8 +124,8 @@ Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` # Check that VNET and subnets exist # --------------------------------- $vnet = Deploy-VirtualNetwork -Name $config.sre.network.vnet.name -ResourceGroupName $config.sre.network.vnet.rg -AddressPrefix $config.sre.network.vnet.cidr -Location $config.sre.location -$null = Deploy-Subnet -Name $config.sre.network.subnets.data.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.data.cidr -$subnet = Deploy-Subnet -Name $config.sre.network.subnets.airlock.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.airlock.cidr +$subnetData = 
Deploy-Subnet -Name $config.sre.network.subnets.data.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.data.cidr +$subnetAirlock = Deploy-Subnet -Name $config.sre.network.subnets.airlock.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.airlock.cidr # Expand GitLab cloudinit @@ -259,7 +259,7 @@ $sshKeys = $result.Value[0].Message | Select-String "\[stdout\]\s*([\s\S]*?)\s*\ $bootDiagnosticsAccount = Deploy-StorageAccount -Name $config.sre.storage.bootdiagnostics.accountName -ResourceGroupName $config.sre.storage.bootdiagnostics.rg -Location $config.sre.location $vmNameReview = $config.sre.webapps.gitlabreview.vmName $vmIpAddress = $config.sre.webapps.gitlabreview.ip -$vmNic = Deploy-VirtualMachineNIC -Name "$vmNameReview-NIC" -ResourceGroupName $config.sre.webapps.rg -Subnet $subnet -PrivateIpAddress $vmIpAddress -Location $config.sre.location +$vmNic = Deploy-VirtualMachineNIC -Name "$vmNameReview-NIC" -ResourceGroupName $config.sre.webapps.rg -Subnet $subnetAirlock -PrivateIpAddress $vmIpAddress -Location $config.sre.location # Expand GitLab review cloudinit From 7fc3f8652e7e1dbb55dad7fdfef63f891b6089c0 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 29 Jun 2020 19:54:07 +0100 Subject: [PATCH 121/155] Log message before slow key retrieval step --- .../setup/Setup_SRE_WebApp_Servers.ps1 | 1 + 1 file changed, 1 insertion(+) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 7fde931ec4..7bed674695 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -247,6 +247,7 @@ echo " $(cat /etc/ssh/ssh_host_rsa_key.pub | cut -d " " -f -2)" echo " $(cat /etc/ssh/ssh_host_ed25519_key.pub | cut -d " " -f -2)" echo " $(cat /etc/ssh/ssh_host_ecdsa_key.pub | cut -d " " -f -2)" '.Replace('', 
$config.sre.webapps.gitlab.ip) +Add-LogMessage -Level Info "Fetching ssh keys from gitlab..." $result = Invoke-RemoteScript -VMName $config.sre.webapps.gitlab.vmName -ResourceGroupName $config.sre.webapps.rg -Shell "UnixShell" -Script $script Add-LogMessage -Level Success "Fetching ssh keys from gitlab succeeded" # Extract everything in between the [stdout] and [stderr] blocks of the result message. i.e. all output of the script. From 17ce3b6d2a18b1961443decf30dc13c17f3cfd93 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 29 Jun 2020 19:56:08 +0100 Subject: [PATCH 122/155] Test C region now centralus (was uksouth) --- environment_configs/core/shm_testc_core_config.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environment_configs/core/shm_testc_core_config.json b/environment_configs/core/shm_testc_core_config.json index d4707e668c..04e0c5916b 100644 --- a/environment_configs/core/shm_testc_core_config.json +++ b/environment_configs/core/shm_testc_core_config.json @@ -13,6 +13,6 @@ "stateCountyRegion": "London", "countryCode": "GB" }, - "location": "uksouth", + "location": "centralus", "ipPrefix": "10.0.0" } From 44b74ee4608d507a7ac361581f0e42a9a4016075 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Mon, 29 Jun 2020 19:56:31 +0100 Subject: [PATCH 123/155] Test C region centralus (was uksouth) - full config --- .../full/sre_testcsandbox_full_config.json | 116 ++++++++++++------ 1 file changed, 76 insertions(+), 40 deletions(-) diff --git a/environment_configs/full/sre_testcsandbox_full_config.json b/environment_configs/full/sre_testcsandbox_full_config.json index aecce4f20f..f93080e357 100644 --- a/environment_configs/full/sre_testcsandbox_full_config.json +++ b/environment_configs/full/sre_testcsandbox_full_config.json @@ -9,7 +9,7 @@ "stateCountyRegion": "London", "countryCode": "GB" }, - "location": "uksouth", + "location": "centralus", "adminSecurityGroupName": "Safe Haven Test Admins", "domain": { "fqdn": 
"testc.dsgroupdev.co.uk", @@ -133,7 +133,7 @@ "subscriptionName": "Turing SRE - Sandbox (SHM Test C)", "id": "sandbox", "shortName": "sre-sandbox", - "location": "uksouth", + "location": "centralus", "tier": "2", "adminSecurityGroupName": "Safe Haven Test Admins", "domain": { @@ -141,17 +141,21 @@ "netbiosName": "SANDBOX", "dn": "DC=sandbox,DC=testc,DC=dsgroupdev,DC=co,DC=uk", "securityGroups": { - "serverAdmins": { - "name": "SG SANDBOX Server Administrators", - "description": "SG SANDBOX Server Administrators" + "dataAdministrators": { + "name": "SG SANDBOX Data Administrators", + "description": "SG SANDBOX Data Administrators" }, - "sqlAdmins": { - "name": "SG SANDBOX SQL Server Administrators", - "description": "SG SANDBOX SQL Server Administrators" + "systemAdministrators": { + "name": "SG SANDBOX System Administrators", + "description": "SG SANDBOX System Administrators" }, "researchUsers": { "name": "SG SANDBOX Research Users", "description": "SG SANDBOX Research Users" + }, + "reviewUsers": { + "name": "SG SANDBOX Review Users", + "description": "SG SANDBOX Review Users" } } }, @@ -177,24 +181,36 @@ "prefix": "10.151.2", "cidr": "10.151.2.0/24" }, - "dbingress": { - "name": "DbIngressSubnet", + "databases": { + "name": "DatabasesSubnet", "prefix": "10.151.3", - "nsg": "dbingress", + "nsg": "databases", "cidr": "10.151.3.0/24" + }, + "airlock": { + "name": "AirlockSubnet", + "prefix": "10.151.4", + "nsg": "airlock", + "cidr": "10.151.4.0/24" } }, "nsg": { "data": {}, - "dbingress": { - "name": "NSG_SRE_SANDBOX_DB_INGRESS" + "databases": { + "name": "NSG_SRE_SANDBOX_DATABASES" + }, + "airlock": { + "name": "NSG_SRE_SANDBOX_AIRLOCK" } } }, "storage": { "artifacts": { "rg": "RG_SRE_ARTIFACTS", - "accountName": "sreartifactsjazbmpthnsgk" + "accountName": "sreartifactsjazbmpthnsgk", + "containers": { + "gitlabAirlockName": "gitlabairlock" + } }, "bootdiagnostics": { "rg": "RG_SRE_ARTIFACTS", @@ -216,13 +232,23 @@ "gitlabLdapPassword": 
"sre-sandbox-gitlab-ldap-password", "gitlabRootPassword": "sre-sandbox-gitlab-root-password", "gitlabUserPassword": "sre-sandbox-gitlab-user-password", + "gitlabUsername": "sre-sandbox-gitlab-username", + "gitlabPassword": "sre-sandbox-gitlab-password", + "gitlabAPIToken": "sre-sandbox-gitlab-api-token", "hackmdLdapPassword": "sre-sandbox-hackmd-ldap-password", "hackmdUserPassword": "sre-sandbox-hackmd-user-password", + "gitlabReviewUsername": "sre-sandbox-gitlab-review-username", + "gitlabReviewPassword": "sre-sandbox-gitlab-review-password", + "gitlabReviewAPIToken": "sre-sandbox-gitlab-review-api-token", "letsEncryptCertificate": "sre-sandbox-lets-encrypt-certificate", "npsSecret": "sre-sandbox-nps-secret", + "postgresDbAdminUsername": "sre-sandbox-postgresdb-admin-username", + "postgresDbAdminPassword": "sre-sandbox-postgresdb-admin-password", + "postgresVmAdminPassword": "sre-sandbox-postgresvm-admin-password", "rdsAdminPassword": "sre-sandbox-rdsvm-admin-password", "sqlAuthUpdateUsername": "sre-sandbox-sql-authupdate-user-username", "sqlAuthUpdateUserPassword": "sre-sandbox-sql-authupdate-user-password", + "sqlVmAdminPassword": "sre-sandbox-sqlvm-admin-password", "testResearcherPassword": "sre-sandbox-test-researcher-password", "webappAdminPassword": "sre-sandbox-webappvm-admin-password" } @@ -240,16 +266,28 @@ "dsvm": { "name": "SANDBOX DSVM LDAP", "samAccountName": "dsvmldapsandbox" + }, + "postgres": { + "name": "SANDBOX Postgres VM LDAP", + "samAccountName": "pgvmldapsandbox", + "passwordSecretName": "sre-sandbox-postgresvm-ldap-password" + } + }, + "serviceAccounts": { + "postgres": { + "name": "SANDBOX Postgres DB Service Account", + "samAccountName": "pgdbsrvcsandbox", + "passwordSecretName": "sre-sandbox-postgresdb-service-account-password" } }, "datamount": { - "name": "SANDBOX Data Mount", + "name": "SANDBOX Data Mount Service Account", "samAccountName": "datamountsandbox" }, "researchers": { "test": { "name": "SANDBOX Test Researcher", - 
"samAccountName": "testresrchsandbox" + "samAccountName": "researchersandbox" } } }, @@ -282,6 +320,14 @@ "hostname": "DKP-SRE-SANDBOX", "fqdn": "DKP-SRE-SANDBOX.testc.dsgroupdev.co.uk", "ip": "10.151.1.248" + }, + "sessionHost3": { + "vmName": "REV-SRE-SANDBOX", + "vmSize": "Standard_DS2_v2", + "nsg": "NSG_SRE_SANDBOX_RDS_SESSION_HOSTS", + "hostname": "REV-SRE-SANDBOX", + "fqdn": "REV-SRE-SANDBOX.testc.dsgroupdev.co.uk", + "ip": "10.151.1.247" } }, "dataserver": { @@ -300,39 +346,29 @@ "rg": "RG_SRE_WEBAPPS", "nsg": "NSG_SRE_SANDBOX_WEBAPPS", "gitlab": { - "vmName": "GITLAB-SRE-SANDBOX", + "vmName": "GITLAB-SANDBOX", "vmSize": "Standard_D2s_v3", - "hostname": "GITLAB-SRE-SANDBOX", - "fqdn": "GITLAB-SRE-SANDBOX.testc.dsgroupdev.co.uk", + "hostname": "GITLAB-SANDBOX", + "fqdn": "GITLAB-SANDBOX.testc.dsgroupdev.co.uk", "ip": "10.151.2.151" }, + "gitlabreview": { + "vmName": "GITLAB-REVIEW-SANDBOX", + "vmSize": "Standard_D2s_v3", + "hostname": "GITLAB-REVIEW-SANDBOX", + "fqdn": "GITLAB-REVIEW-SANDBOX.testc.dsgroupdev.co.uk", + "ip": "10.151.4.151" + }, "hackmd": { - "vmName": "HACKMD-SRE-SANDBOX", + "vmName": "HACKMD-SANDBOX", "vmSize": "Standard_D2s_v3", - "hostname": "HACKMD-SRE-SANDBOX", - "fqdn": "HACKMD-SRE-SANDBOX.testc.dsgroupdev.co.uk", + "hostname": "HACKMD-SANDBOX", + "fqdn": "HACKMD-SANDBOX.testc.dsgroupdev.co.uk", "ip": "10.151.2.152" } }, "databases": { - "rg": "RG_SRE_DATABASES", - "dbmssqlingress": { - "name": "SQL-ING-SANDBOX", - "enableSSIS": true, - "ipLastOctet": "4", - "port": "14330", - "sku": "sqldev", - "subnet": "dbingress", - "vmSize": "Standard_DS2_v2", - "datadisk": { - "size_gb": "2048", - "type": "Standard_LRS" - }, - "osdisk": { - "size_gb": "128", - "type": "Standard_LRS" - } - } + "rg": "RG_SRE_DATABASES" }, "dsvm": { "rg": "RG_SRE_COMPUTE", From 7acd3e50e85fc7d7c3e18e320b7384946c4f713b Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Tue, 30 Jun 2020 09:49:08 +0100 Subject: [PATCH 124/155] Full path to scripts in crontab --- 
.../cloud_init/cloud-init-gitlab-review.template.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml index 17b84a611d..dea0d5940e 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml @@ -213,7 +213,7 @@ runcmd: # run zipfile_to_gitlab_project.py and check_merge_requests.py sequentially, and with a lock, to prevent # them from interfering. Return an error code - echo "*** Adding zipfile_to_gitlab_project.py and check_merge_requests.py to crontab ***" - - echo "*/10 * * * * gitlabdaemon flock -n /tmp/review_steps.lock bash -c 'EXIT_STATUS=0; zipfile_to_gitlab_project.py || EXIT_STATUS=\$?; check_merge_requests.py || EXIT_STATUS=\$?; exit \$EXIT_STATUS'" >> /etc/crontab + - echo "*/10 * * * * gitlabdaemon flock -n /tmp/review_steps.lock bash -c 'EXIT_STATUS=0; /home/gitlabdaemon/zipfile_to_gitlab_project.py || EXIT_STATUS=\$?; /home/gitlabdaemon/check_merge_requests.py || EXIT_STATUS=\$?; exit \$EXIT_STATUS'" >> /etc/crontab # -------------------------------- # GIVE gitlabdaemon OWNERSHIP OF THEIR HOME DIRECTORY # -------------------------------- From 66964e032e8fb005362b116c6f2817be3539af3f Mon Sep 17 00:00:00 2001 From: James Robinson Date: Wed, 1 Jul 2020 13:20:42 +0100 Subject: [PATCH 125/155] Working ARM template for NSG webapps --- deployment/common/Deployments.psm1 | 97 +++++++++----- .../arm_templates/sre-nsg-rules-template.json | 126 ++++++++++++++++++ .../setup/Setup_SRE_WebApp_Servers.ps1 | 14 +- 3 files changed, 204 insertions(+), 33 deletions(-) create mode 100755 deployment/secure_research_environment/arm_templates/sre-nsg-rules-template.json diff --git a/deployment/common/Deployments.psm1 
b/deployment/common/Deployments.psm1 index a262bb492c..bf5b6bf447 100644 --- a/deployment/common/Deployments.psm1 +++ b/deployment/common/Deployments.psm1 @@ -88,6 +88,35 @@ function Add-VmToNSG { Export-ModuleMember -Function Add-VmToNSG +# Ensure the specified storage container is empty +# ----------------------------------------------- +function Clear-StorageContainer { + param( + [Parameter(Mandatory = $true, HelpMessage = "Name of storage container to clear")] + $Name, + [Parameter(Mandatory = $true, HelpMessage = "Name of storage account where the container exists")] + $StorageAccount + ) + # delete existing blobs in the container + $blobs = @(Get-AzStorageBlob -Container $Name -Context $StorageAccount.Context) + $numBlobs = $blobs.Length + if ($numBlobs -gt 0) { + Add-LogMessage -Level Info "[ ] deleting $numBlobs blobs aready in container '$Name'..." + $blobs | ForEach-Object { Remove-AzStorageBlob -Blob $_.Name -Container $Name -Context $StorageAccount.Context -Force } + while ($numBlobs -gt 0) { + Start-Sleep -Seconds 5 + $numBlobs = (Get-AzStorageBlob -Container $Name -Context $StorageAccount.Context).Length + } + if ($?) { + Add-LogMessage -Level Success "Blob deletion succeeded" + } else { + Add-LogMessage -Level Fatal "Blob deletion failed!" 
+ } + } +} +Export-ModuleMember -Function Clear-StorageContainer + + # Deploy an ARM template and log the output # ----------------------------------------- function Deploy-ArmTemplate { @@ -370,11 +399,13 @@ Export-ModuleMember -Function Deploy-ManagedDisk function Deploy-NetworkSecurityGroup { param( [Parameter(Mandatory = $true, HelpMessage = "Name of network security group to deploy")] - $Name, + [string]$Name, [Parameter(Mandatory = $true, HelpMessage = "Name of resource group to deploy into")] - $ResourceGroupName, + [string]$ResourceGroupName, [Parameter(Mandatory = $true, HelpMessage = "Location of resource group to deploy")] - $Location + [string]$Location, + [Parameter(Mandatory = $false, HelpMessage = "Remove all rules from this network security group")] + [switch]$RemoveAllRules = $false ) Add-LogMessage -Level Info "Ensuring that network security group '$Name' exists..." $nsg = Get-AzNetworkSecurityGroup -Name $Name -ResourceGroupName $ResourceGroupName -ErrorVariable notExists -ErrorAction SilentlyContinue @@ -389,6 +420,9 @@ function Deploy-NetworkSecurityGroup { } else { Add-LogMessage -Level InfoSuccess "Network security group '$Name' already exists" } + if ($RemoveAllRules) { + $nsg = Remove-AllNetworkSecurityGroupRules -Name $Name -ResourceGroupName $ResourceGroupName + } return $nsg } Export-ModuleMember -Function Deploy-NetworkSecurityGroup @@ -613,35 +647,6 @@ function Deploy-StorageContainer { Export-ModuleMember -Function Deploy-StorageContainer -# Ensure the specified storage container is empty -# ----------------------------------------------- -function Clear-StorageContainer { - param( - [Parameter(Mandatory = $true, HelpMessage = "Name of storage container to clear")] - $Name, - [Parameter(Mandatory = $true, HelpMessage = "Name of storage account where the container exists")] - $StorageAccount - ) - # delete existing blobs in the container - $blobs = @(Get-AzStorageBlob -Container $Name -Context $StorageAccount.Context) - $numBlobs = 
$blobs.Length - if ($numBlobs -gt 0) { - Add-LogMessage -Level Info "[ ] deleting $numBlobs blobs aready in container '$Name'..." - $blobs | ForEach-Object { Remove-AzStorageBlob -Blob $_.Name -Container $Name -Context $StorageAccount.Context -Force } - while ($numBlobs -gt 0) { - Start-Sleep -Seconds 5 - $numBlobs = (Get-AzStorageBlob -Container $Name -Context $StorageAccount.Context).Length - } - if ($?) { - Add-LogMessage -Level Success "Blob deletion succeeded" - } else { - Add-LogMessage -Level Fatal "Blob deletion failed!" - } - } -} -Export-ModuleMember -Function Clear-StorageContainer - - # Create Linux virtual machine if it does not exist # ------------------------------------------------- function Deploy-UbuntuVirtualMachine { @@ -980,6 +985,34 @@ function New-DNSZone { Export-ModuleMember -Function New-DNSZone +# Create network security group rule if it does not exist +# ------------------------------------------------------- +function Remove-AllNetworkSecurityGroupRules { + param( + [Parameter(Mandatory = $true, HelpMessage = "Name of the NSG from which to remove rules.")] + [string]$Name, + [Parameter(Mandatory = $true, HelpMessage = "Name of resource group that the NSG belongs to.")] + [string]$ResourceGroupName + ) + try { + $nsg = Get-AzNetworkSecurityGroup -Name $Name -ResourceGroupName $ResourceGroupName -ErrorAction Stop + } catch [Microsoft.Rest.Azure.CloudException] { + Add-LogMessage -Level Fatal "Could not find an NSG named '$Name'!" + } + $rules = Get-AzNetworkSecurityRuleConfig -NetworkSecurityGroup $nsg + Add-LogMessage -Level Info "[ ] Preparing to remove $($rules.Count) rules from NSG '$Name'..." + $null = $rules | ForEach-Object { Remove-AzNetworkSecurityRuleConfig -Name $_.Name -NetworkSecurityGroup $nsg } + $null = $nsg | Set-AzNetworkSecurityGroup + if ($?) 
{ + Add-LogMessage -Level Success "Removed all rules from NSG '$Name'" + } else { + Add-LogMessage -Level Fatal "Failed to remove rules from NSG '$Name'" + } + return $nsg +} +Export-ModuleMember -Function Remove-AllNetworkSecurityGroupRules + + # Remove Virtual Machine # ---------------------- function Remove-VirtualMachine { diff --git a/deployment/secure_research_environment/arm_templates/sre-nsg-rules-template.json b/deployment/secure_research_environment/arm_templates/sre-nsg-rules-template.json new file mode 100755 index 0000000000..0c03e3ab4d --- /dev/null +++ b/deployment/secure_research_environment/arm_templates/sre-nsg-rules-template.json @@ -0,0 +1,126 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "ipAddressGitLab": { + "type": "string" + }, + "ipAddressGitLabReview": { + "type": "string" + }, + "ipAddressSessionHostApps": { + "type": "string" + }, + "ipAddressSessionHostReview": { + "type": "string" + }, + "nsgWebappsName": { + "type": "string" + }, + "subnetComputeCidr": { + "type": "string" + }, + "subnetVpnCidr": { + "type": "string" + } + }, + "variables": {}, + "resources": [ + { + "type": "Microsoft.Network/networkSecurityGroups/securityRules", + "apiVersion": "2020-04-01", + "name": "[concat(parameters('nsgWebappsName'), '/AllowInboundSshSubnetVpn')]", + "properties": { + "access": "Allow", + "description": "Allow inbound SSH connections from VPN subnet", + "destinationAddressPrefix": "VirtualNetwork", + "destinationPortRange": "22", + "direction": "Inbound", + "priority": 1000, + "protocol": "Tcp", + "sourceAddressPrefix": "[parameters('subnetVpnCidr')]", + "sourcePortRange": "*" + } + }, + { + "type": "Microsoft.Network/networkSecurityGroups/securityRules", + "apiVersion": "2020-04-01", + "name": "[concat(parameters('nsgWebappsName'), '/AllowInboundHttpSessionHostApps')]", + "properties": { + "access": "Allow", + "description": "Allow 
inbound http(s) from application session host", + "destinationAddressPrefix": "VirtualNetwork", + "destinationPortRanges": ["80", "443"], + "direction": "Inbound", + "priority": 2000, + "protocol": "Tcp", + "sourceAddressPrefix": "[parameters('ipAddressSessionHostApps')]", + "sourcePortRange": "*" + } + }, + { + "type": "Microsoft.Network/networkSecurityGroups/securityRules", + "apiVersion": "2020-04-01", + "name": "[concat(parameters('nsgWebappsName'), '/AllowInboundHttpSubnetCompute')]", + "properties": { + "access": "Allow", + "description": "Allow inbound http(s) from compute VM subnet", + "destinationAddressPrefix": "VirtualNetwork", + "destinationPortRanges": ["80", "443"], + "direction": "Inbound", + "priority": 3000, + "protocol": "Tcp", + "sourceAddressPrefix": "[parameters('subnetComputeCidr')]", + "sourcePortRange": "*" + } + }, + { + "type": "Microsoft.Network/networkSecurityGroups/securityRules", + "apiVersion": "2020-04-01", + "name": "[concat(parameters('nsgWebappsName'), '/AllowInboundHttpSshGitLabReview')]", + "properties": { + "access": "Allow", + "description": "Allow inbound ssh and http(s) connections from GitLab review VM", + "destinationAddressPrefix": "VirtualNetwork", + "destinationPortRanges": ["22", "80", "443"], + "direction": "Inbound", + "priority": 3500, + "protocol": "Tcp", + "sourceAddressPrefix": "[parameters('ipAddressGitLabReview')]", + "sourcePortRange": "*" + } + }, + { + "type": "Microsoft.Network/networkSecurityGroups/securityRules", + "apiVersion": "2020-04-01", + "name": "[concat(parameters('nsgWebappsName'), '/DenyInboundAnyAnywhere')]", + "properties": { + "access": "Deny", + "description": "Deny inbound connections from any other sources", + "destinationAddressPrefix": "*", + "destinationPortRange": "*", + "direction": "Inbound", + "priority": 4000, + "protocol": "*", + "sourceAddressPrefix": "*", + "sourcePortRange": "*" + } + }, + { + "type": "Microsoft.Network/networkSecurityGroups/securityRules", + "apiVersion": 
"2020-04-01", + "name": "[concat(parameters('nsgWebappsName'), '/DenyOutboundAnyAnywhere')]", + "properties": { + "access": "Deny", + "description": "Deny outbound connections to any other sources", + "destinationAddressPrefix": "*", + "destinationPortRange": "*", + "direction": "Outbound", + "priority": 4000, + "protocol": "*", + "sourceAddressPrefix": "*", + "sourcePortRange": "*" + } + } + ] +} \ No newline at end of file diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 7bed674695..f26209706f 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -37,7 +37,19 @@ $gitlabAPIToken = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -S # Set up NSGs for the webapps # --------------------------- -$nsgWebapps = Deploy-NetworkSecurityGroup -Name $config.sre.webapps.nsg -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location +$nsgAirlock = Deploy-NetworkSecurityGroup -Name $config.sre.network.nsg.airlock.name -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location -RemoveAllRules +$nsgWebapps = Deploy-NetworkSecurityGroup -Name $config.sre.webapps.nsg -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location -RemoveAllRules +$params = @{ + ipAddressGitLab = $config.sre.webapps.gitlab.ip + ipAddressGitLabReview = $config.sre.webapps.gitlabreview.ip + ipAddressSessionHostApps = $config.sre.rds.sessionHost1.ip + ipAddressSessionHostReview = $config.sre.rds.sessionHost3.ip + nsgWebappsName = $config.sre.webapps.nsg + subnetComputeCidr = $config.sre.network.subnets.data.cidr + subnetVpnCidr = "172.16.201.0/24" +} +Deploy-ArmTemplate -TemplatePath (Join-Path $PSScriptRoot ".." 
"arm_templates" "sre-nsg-rules-template.json") -Params $params -ResourceGroupName $config.sre.network.vnet.rg + # TODO fix this when this is no longer hard-coded Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps ` From 978db16196c454f858d94ee6084a376142f963fe Mon Sep 17 00:00:00 2001 From: James Robinson Date: Wed, 1 Jul 2020 13:35:47 +0100 Subject: [PATCH 126/155] Added ARM template rules for airlock NSG --- .../arm_templates/sre-nsg-rules-template.json | 83 ++++++++++++++++++ .../setup/Setup_SRE_WebApp_Servers.ps1 | 85 +------------------ 2 files changed, 85 insertions(+), 83 deletions(-) diff --git a/deployment/secure_research_environment/arm_templates/sre-nsg-rules-template.json b/deployment/secure_research_environment/arm_templates/sre-nsg-rules-template.json index 0c03e3ab4d..7c53fff186 100755 --- a/deployment/secure_research_environment/arm_templates/sre-nsg-rules-template.json +++ b/deployment/secure_research_environment/arm_templates/sre-nsg-rules-template.json @@ -14,6 +14,9 @@ "ipAddressSessionHostReview": { "type": "string" }, + "nsgAirlockName": { + "type": "string" + }, "nsgWebappsName": { "type": "string" }, @@ -121,6 +124,86 @@ "sourceAddressPrefix": "*", "sourcePortRange": "*" } + }, + { + "type": "Microsoft.Network/networkSecurityGroups/securityRules", + "apiVersion": "2020-04-01", + "name": "[concat(parameters('nsgAirlockName'), '/AllowInboundSshSubnetVpn')]", + "properties": { + "access": "Allow", + "description": "Allow inbound SSH connections from VPN subnet", + "destinationAddressPrefix": "VirtualNetwork", + "destinationPortRange": "22", + "direction": "Inbound", + "priority": 1000, + "protocol": "Tcp", + "sourceAddressPrefix": "[parameters('subnetVpnCidr')]", + "sourcePortRange": "*" + } + }, + { + "type": "Microsoft.Network/networkSecurityGroups/securityRules", + "apiVersion": "2020-04-01", + "name": "[concat(parameters('nsgAirlockName'), '/AllowInboundRdpSessionHostReview')]", + "properties": { + "access": "Allow", + 
"description": "Allow inbound RDP connections from GitLab review VM", + "destinationAddressPrefix": "VirtualNetwork", + "destinationPortRanges": ["3389"], + "direction": "Inbound", + "priority": 2000, + "protocol": "Tcp", + "sourceAddressPrefix": "[parameters('ipAddressSessionHostReview')]", + "sourcePortRange": "*" + } + }, + { + "type": "Microsoft.Network/networkSecurityGroups/securityRules", + "apiVersion": "2020-04-01", + "name": "[concat(parameters('nsgAirlockName'), '/DenyInboundAnyAnywhere')]", + "properties": { + "access": "Deny", + "description": "Deny inbound connections from any other sources", + "destinationAddressPrefix": "*", + "destinationPortRange": "*", + "direction": "Inbound", + "priority": 4000, + "protocol": "*", + "sourceAddressPrefix": "*", + "sourcePortRange": "*" + } + }, + { + "type": "Microsoft.Network/networkSecurityGroups/securityRules", + "apiVersion": "2020-04-01", + "name": "[concat(parameters('nsgAirlockName'), '/AllowOutboundHttpSshGitLab')]", + "properties": { + "access": "Allow", + "description": "Allow inbound ssh and http(s) connections from user-facing GitLab VM", + "destinationAddressPrefix": "[parameters('ipAddressGitLab')]", + "destinationPortRanges": ["22", "80", "443"], + "direction": "Outbound", + "priority": 3500, + "protocol": "Tcp", + "sourceAddressPrefix": "VirtualNetwork", + "sourcePortRange": "*" + } + }, + { + "type": "Microsoft.Network/networkSecurityGroups/securityRules", + "apiVersion": "2020-04-01", + "name": "[concat(parameters('nsgAirlockName'), '/DenyOutboundAnyAnywhere')]", + "properties": { + "access": "Deny", + "description": "Deny outbound connections to any other sources", + "destinationAddressPrefix": "*", + "destinationPortRange": "*", + "direction": "Outbound", + "priority": 4000, + "protocol": "*", + "sourceAddressPrefix": "*", + "sourcePortRange": "*" + } } ] } \ No newline at end of file diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 
b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index f26209706f..dfc2b9f9cc 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -44,95 +44,14 @@ $params = @{ ipAddressGitLabReview = $config.sre.webapps.gitlabreview.ip ipAddressSessionHostApps = $config.sre.rds.sessionHost1.ip ipAddressSessionHostReview = $config.sre.rds.sessionHost3.ip + nsgAirlockName = $config.sre.network.nsg.airlock.name nsgWebappsName = $config.sre.webapps.nsg subnetComputeCidr = $config.sre.network.subnets.data.cidr - subnetVpnCidr = "172.16.201.0/24" + subnetVpnCidr = "172.16.201.0/24" # TODO fix this when it is no longer hard-coded } Deploy-ArmTemplate -TemplatePath (Join-Path $PSScriptRoot ".." "arm_templates" "sre-nsg-rules-template.json") -Params $params -ResourceGroupName $config.sre.network.vnet.rg -# TODO fix this when this is no longer hard-coded -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps ` - -Name "InboundAllowVpnSsh" ` - -Description "Inbound allow SSH connections from VPN subnet" ` - -Priority 1000 ` - -Direction Inbound -Access Allow -Protocol TCP ` - -SourceAddressPrefix "172.16.201.0/24" -SourcePortRange * ` - -DestinationAddressPrefix VirtualNetwork -DestinationPortRange 22 -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps ` - -Name "InboundAllowHttpSessionHost" ` - -Description "Inbound allow http(s) from application session host" ` - -Priority 2000 ` - -Direction Inbound -Access Allow -Protocol TCP ` - -SourceAddressPrefix $config.sre.rds.sessionHost1.ip -SourcePortRange * ` - -DestinationAddressPrefix VirtualNetwork -DestinationPortRange 80,443 -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps ` - -Name "InboundAllowHttpComputeSubnet" ` - -Description "Inbound allow http(s) from compute VM subnet" ` - -Priority 3000 ` - -Direction Inbound -Access Allow -Protocol TCP ` - 
-SourceAddressPrefix $config.sre.network.subnets.data.cidr -SourcePortRange * ` - -DestinationAddressPrefix VirtualNetwork -DestinationPortRange 80,443 -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps ` - -Name "InboundDenyOtherVNet" ` - -Description "Inbound deny other VNet connections" ` - -Priority 4000 ` - -Direction Inbound -Access Deny -Protocol * ` - -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` - -DestinationAddressPrefix VirtualNetwork -DestinationPortRange * -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps ` - -Name "OutboundDenyInternet" ` - -Description "Outbound deny internet" ` - -Priority 4000 ` - -Direction Outbound -Access Deny -Protocol * ` - -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` - -DestinationAddressPrefix Internet -DestinationPortRange * -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps ` - -Name "OutboundDenyVNet" ` - -Description "Outbound deny VNet connections" ` - -Priority 3000 ` - -Direction Outbound -Access Deny -Protocol * ` - -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` - -DestinationAddressPrefix VirtualNetwork -DestinationPortRange * - -# TODO fix hard-coded cidr in InboundAllowVpnSsh -$nsgAirlock = Deploy-NetworkSecurityGroup -Name $config.sre.network.nsg.airlock.name -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` - -Name "InboundAllowVpnSsh" ` - -Description "Inbound allow SSH connections from VPN subnet" ` - -Priority 1000 ` - -Direction Inbound -Access Allow -Protocol TCP ` - -SourceAddressPrefix "172.16.201.0/24" -SourcePortRange * ` - -DestinationAddressPrefix VirtualNetwork -DestinationPortRange 22 -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` - -Name "InboundAllowReviewServer" ` - -Description "Inbound allow connections from review session host" ` - -Priority 2000 ` - -Direction Inbound -Access Allow -Protocol * ` - 
-SourceAddressPrefix $config.sre.rds.sessionHost3.ip -SourcePortRange * ` - -DestinationAddressPrefix VirtualNetwork -DestinationPortRange 3389 -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` - -Name "InboundDenyOtherVNet" ` - -Description "Inbound deny other VNet connections" ` - -Priority 4000 ` - -Direction Inbound -Access Deny -Protocol * ` - -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` - -DestinationAddressPrefix VirtualNetwork -DestinationPortRange * -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` - -Name "OutboundAllowGitLabInternal" ` - -Description "Outbound allow GitLab internal server" ` - -Priority 3000 ` - -Direction Outbound -Access Deny -Protocol * ` - -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` - -DestinationAddressPrefix $config.sre.webapps.gitlab.ip -DestinationPortRange * -Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock ` - -Name "OutboundDenyVNet" ` - -Description "Outbound deny other VNet connections" ` - -Priority 4000 ` - -Direction Outbound -Access Deny -Protocol * ` - -SourceAddressPrefix VirtualNetwork -SourcePortRange * ` - -DestinationAddressPrefix VirtualNetwork -DestinationPortRange * - # Check that VNET and subnets exist # --------------------------------- $vnet = Deploy-VirtualNetwork -Name $config.sre.network.vnet.name -ResourceGroupName $config.sre.network.vnet.rg -AddressPrefix $config.sre.network.vnet.cidr -Location $config.sre.location From 3be6504391c60b532ac8c29ab06f7995c054287f Mon Sep 17 00:00:00 2001 From: James Robinson Date: Wed, 1 Jul 2020 21:20:34 +0100 Subject: [PATCH 127/155] Simplified webapp servers deployment flow --- deployment/common/Deployments.psm1 | 24 +- .../arm_templates/sre-webapps-template.json | 309 ------------------ .../cloud-init-gitlab-review.template.yaml | 3 +- .../cloud-init-gitlab.template.yaml | 2 +- .../Deploy_RDS_Environment.template.ps1 | 4 +- .../setup/Setup_SRE_VNET_RDS.ps1 | 39 +-- 
.../setup/Setup_SRE_WebApp_Servers.ps1 | 290 ++++++++-------- 7 files changed, 174 insertions(+), 497 deletions(-) delete mode 100644 deployment/secure_research_environment/arm_templates/sre-webapps-template.json diff --git a/deployment/common/Deployments.psm1 b/deployment/common/Deployments.psm1 index bf5b6bf447..88608cdd58 100644 --- a/deployment/common/Deployments.psm1 +++ b/deployment/common/Deployments.psm1 @@ -665,15 +665,24 @@ function Deploy-UbuntuVirtualMachine { $CloudInitYaml, [Parameter(Mandatory = $true, HelpMessage = "Location of resource group to deploy")] $Location, - [Parameter(Mandatory = $true, HelpMessage = "ID of network card to attach to this VM")] + [Parameter(Mandatory = $true, ParameterSetName="ByNicId_ByImageId", HelpMessage = "ID of network card to attach to this VM")] + [Parameter(Mandatory = $true, ParameterSetName="ByNicId_ByImageSku", HelpMessage = "ID of network card to attach to this VM")] $NicId, + [Parameter(Mandatory = $true, ParameterSetName="ByIpAddress_ByImageId", HelpMessage = "Private IP address to assign to this VM")] + [Parameter(Mandatory = $true, ParameterSetName="ByIpAddress_ByImageSku", HelpMessage = "Private IP address to assign to this VM")] + $PrivateIpAddress, + [Parameter(Mandatory = $true, ParameterSetName="ByIpAddress_ByImageId", HelpMessage = "Subnet to deploy this VM into")] + [Parameter(Mandatory = $true, ParameterSetName="ByIpAddress_ByImageSku", HelpMessage = "Subnet to deploy this VM into")] + $Subnet, [Parameter(Mandatory = $true, HelpMessage = "OS disk type (eg. 
Standard_LRS)")] $OsDiskType, [Parameter(Mandatory = $true, HelpMessage = "Name of resource group to deploy into")] $ResourceGroupName, - [Parameter(Mandatory = $true, ParameterSetName="ByImageId", HelpMessage = "ID of VM image to deploy")] + [Parameter(Mandatory = $true, ParameterSetName="ByNicId_ByImageId", HelpMessage = "ID of VM image to deploy")] + [Parameter(Mandatory = $true, ParameterSetName="ByIpAddress_ByImageId", HelpMessage = "ID of VM image to deploy")] $ImageId = $null, - [Parameter(Mandatory = $true, ParameterSetName="ByImageSku", HelpMessage = "SKU of VM image to deploy")] + [Parameter(Mandatory = $true, ParameterSetName="ByNicId_ByImageSku", HelpMessage = "SKU of VM image to deploy")] + [Parameter(Mandatory = $true, ParameterSetName="ByIpAddress_ByImageSku", HelpMessage = "SKU of VM image to deploy")] $ImageSku = $null, [Parameter(Mandatory = $false, HelpMessage = "Size of OS disk (GB)")] $OsDiskSizeGb = $null, @@ -689,10 +698,15 @@ function Deploy-UbuntuVirtualMachine { # Set source image to a custom image or to latest Ubuntu (default) if ($ImageId) { $vmConfig = Set-AzVMSourceImage -VM $vmConfig -Id $ImageId - } else { + } elseif ($ImageSku) { $vmConfig = Set-AzVMSourceImage -VM $vmConfig -PublisherName Canonical -Offer UbuntuServer -Skus $ImageSku -Version "latest" + } else { + Add-LogMessage -Level Fatal "Could not determine which source image to use!" 
} $vmConfig = Set-AzVMOperatingSystem -VM $vmConfig -Linux -ComputerName $Name -Credential $adminCredentials -CustomData $CloudInitYaml + if (-Not $NicId) { + $NicId = (Deploy-VirtualMachineNIC -Name "${Name}-NIC" -ResourceGroupName $ResourceGroupName -Subnet $Subnet -PrivateIpAddress $PrivateIpAddress -Location $Location).Id + } $vmConfig = Add-AzVMNetworkInterface -VM $vmConfig -Id $NicId -Primary if ($OsDiskSizeGb) { $vmConfig = Set-AzVMOSDisk -VM $vmConfig -StorageAccountType $OsDiskType -Name "$Name-OS-DISK" -CreateOption FromImage -DiskSizeInGB $OsDiskSizeGb @@ -1000,7 +1014,7 @@ function Remove-AllNetworkSecurityGroupRules { Add-LogMessage -Level Fatal "Could not find an NSG named '$Name'!" } $rules = Get-AzNetworkSecurityRuleConfig -NetworkSecurityGroup $nsg - Add-LogMessage -Level Info "[ ] Preparing to remove $($rules.Count) rules from NSG '$Name'..." + Add-LogMessage -Level Info "[ ] Preparing to remove $($rules.Count) rule(s) from NSG '$Name'..." $null = $rules | ForEach-Object { Remove-AzNetworkSecurityRuleConfig -Name $_.Name -NetworkSecurityGroup $nsg } $null = $nsg | Set-AzNetworkSecurityGroup if ($?) 
{ diff --git a/deployment/secure_research_environment/arm_templates/sre-webapps-template.json b/deployment/secure_research_environment/arm_templates/sre-webapps-template.json deleted file mode 100644 index 9642bc7438..0000000000 --- a/deployment/secure_research_environment/arm_templates/sre-webapps-template.json +++ /dev/null @@ -1,309 +0,0 @@ -{ - "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "parameters": { - "Administrator_Password": { - "type": "securestring", - "metadata": { - "description": "Enter name for VM Administrator_Password" - } - }, - "Administrator_User": { - "type": "string", - "metadata": { - "description": "Enter name for VM Administrator" - } - }, - "BootDiagnostics_Account_Name": { - "type": "string", - "metadata": { - "description": "Enter name of storage account used for boot diagnostics" - } - }, - "GitLab_Cloud_Init": { - "type": "string", - "defaultValue": "", - "metadata": { - "description": "String passed down to the Virtual Machine." - } - }, - "GitLab_IP_Address": { - "type": "string", - "defaultValue": "10.250.x.153", - "metadata": { - "description": "Enter IP address for VM, must end in 153" - } - }, - "GitLab_Server_Name": { - "defaultValue": "GITLAB", - "type": "string" - }, - "GitLab_VM_Size": { - "type": "string", - "defaultValue": "Standard_B2ms", - "allowedValues": [ - "Standard_B2ms", - "Standard_D2s_v3", - "Standard_DS2_v2", - "Standard_DS3_v2", - "Standard_F4s_v2" - ], - "metadata": { - "description": "Select size of VM" - } - }, - "HackMD_Cloud_Init": { - "type": "string", - "defaultValue": "", - "metadata": { - "description": "String passed down to the Virtual Machine." 
- } - }, - "HackMD_IP_Address": { - "type": "string", - "defaultValue": "10.250.x.154", - "metadata": { - "description": "Enter IP address for VM, must end in 154" - } - }, - "HackMD_Server_Name": { - "defaultValue": "HACKMD", - "type": "string" - }, - "HackMD_VM_Size": { - "type": "string", - "defaultValue": "Standard_B2ms", - "allowedValues": [ - "Standard_B2ms", - "Standard_D2s_v3", - "Standard_DS2_v2", - "Standard_DS3_v2", - "Standard_F4s_v2" - ], - "metadata": { - "description": "Select size of VM" - } - }, - "Virtual_Network_Name": { - "type": "string", - "defaultValue": "DSG_DSGROUPX_VNET1", - "metadata": { - "description": "Enter name of virtual network to provision this VM" - } - }, - "Virtual_Network_Resource_Group": { - "type": "string", - "metadata": { - "description": "Enter name of resource group that is associated with the virtual network above" - } - }, - "Virtual_Network_Subnet": { - "type": "string", - "defaultValue": "Subnet-Data", - "metadata": { - "description": "Enter name of subnet where you want to provision this VM" - } - } - }, - "variables": { - "gitlabnic": "[concat(parameters('GitLab_Server_Name'),'-','NIC')]", - "hackmdnic": "[concat(parameters('HackMD_Server_Name'),'-','NIC')]", - "vnetID": "[resourceId(parameters('Virtual_Network_Resource_Group'), 'Microsoft.Network/virtualNetworks', parameters('Virtual_Network_Name'))]", - "subnet": "[concat(variables('vnetID'),'/subnets/', parameters('Virtual_Network_Subnet'))]" - }, - "resources": [{ - "type": "Microsoft.Compute/virtualMachines", - "name": "[parameters('GitLab_Server_Name')]", - "apiVersion": "2018-06-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "hardwareProfile": { - "vmSize": "[parameters('GitLab_VM_Size')]" - }, - "storageProfile": { - "imageReference": { - "publisher": "Canonical", - "offer": "UbuntuServer", - "sku": "18.04-LTS", - "version": "latest" - }, - "osDisk": { - "osType": "Linux", - "name": 
"[concat(parameters('GitLab_Server_Name'),'-OS-DISK')]", - "createOption": "FromImage", - "caching": "ReadWrite", - "writeAcceleratorEnabled": false, - "managedDisk": { - "storageAccountType": "Standard_LRS" - }, - "diskSizeGB": 64 - }, - "dataDisks": [{ - "lun": 1, - "name": "[concat(parameters('GitLab_Server_Name'),'-DATA-DISK')]", - "createOption": "Empty", - "caching": "None", - "writeAcceleratorEnabled": false, - "managedDisk": { - "storageAccountType": "Standard_LRS" - }, - "diskSizeGB": 512 - }] - }, - "osProfile": { - "computerName": "[parameters('GitLab_Server_Name')]", - "adminUsername": "[parameters('Administrator_User')]", - "adminPassword": "[parameters('Administrator_Password')]", - "linuxConfiguration": { - "disablePasswordAuthentication": false, - "provisionVMAgent": true - }, - "secrets": [], - "allowExtensionOperations": true, - "customData": "[parameters('GitLab_Cloud_Init')]" - }, - "networkProfile": { - "networkInterfaces": [{ - "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('gitlabnic'))]", - "properties": { - "primary": true - } - }] - }, - "diagnosticsProfile": { - "bootDiagnostics": { - "enabled": true, - "storageUri": "[concat('https', '://', parameters('BootDiagnostics_Account_Name'), '.blob.core.windows.net', '/')]" - } - } - }, - "dependsOn": [ - "[resourceId('Microsoft.Network/networkInterfaces', variables('gitlabnic'))]" - ] - }, - { - "type": "Microsoft.Compute/virtualMachines", - "name": "[parameters('HackMD_Server_Name')]", - "apiVersion": "2018-06-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "hardwareProfile": { - "vmSize": "[parameters('HackMD_VM_Size')]" - }, - "storageProfile": { - "imageReference": { - "publisher": "Canonical", - "offer": "UbuntuServer", - "sku": "18.04-LTS", - "version": "latest" - }, - "osDisk": { - "osType": "Linux", - "name": "[concat(parameters('HackMD_Server_Name'),'-OS-DISK')]", - "createOption": "FromImage", - "caching": "ReadWrite", - 
"writeAcceleratorEnabled": false, - "managedDisk": { - "storageAccountType": "Standard_LRS" - }, - "diskSizeGB": 64 - }, - "dataDisks": [{ - "lun": 1, - "name": "[concat(parameters('HackMD_Server_Name'),'-DATA-DISK')]", - "createOption": "Empty", - "caching": "None", - "writeAcceleratorEnabled": false, - "managedDisk": { - "storageAccountType": "Standard_LRS" - }, - "diskSizeGB": 512 - }] - }, - "osProfile": { - "computerName": "[parameters('HackMD_Server_Name')]", - "adminUsername": "[parameters('Administrator_User')]", - "adminPassword": "[parameters('Administrator_Password')]", - "linuxConfiguration": { - "disablePasswordAuthentication": false, - "provisionVMAgent": true - }, - "secrets": [], - "allowExtensionOperations": true, - "customData": "[parameters('HackMD_Cloud_Init')]" - }, - "networkProfile": { - "networkInterfaces": [{ - "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('hackmdnic'))]", - "properties": { - "primary": true - } - }] - }, - "diagnosticsProfile": { - "bootDiagnostics": { - "enabled": true, - "storageUri": "[concat('https', '://', parameters('BootDiagnostics_Account_Name'), '.blob.core.windows.net', '/')]" - } - } - }, - "dependsOn": [ - "[resourceId('Microsoft.Network/networkInterfaces', variables('hackmdnic'))]" - ] - }, - { - "type": "Microsoft.Network/networkInterfaces", - "name": "[variables('gitlabnic')]", - "apiVersion": "2018-10-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "ipConfigurations": [{ - "name": "ipconfig1", - "properties": { - "privateIPAddress": "[parameters('GitLab_IP_Address')]", - "privateIPAllocationMethod": "Static", - "subnet": { - "id": "[variables('subnet')]" - }, - "primary": true, - "privateIPAddressVersion": "IPv4" - } - }], - "enableAcceleratedNetworking": false, - "enableIPForwarding": false, - "primary": true, - "tapConfigurations": [] - }, - "dependsOn": [] - }, - { - "type": "Microsoft.Network/networkInterfaces", - "name": 
"[variables('hackmdnic')]", - "apiVersion": "2018-10-01", - "location": "[resourceGroup().location]", - "scale": null, - "properties": { - "ipConfigurations": [{ - "name": "ipconfig1", - "properties": { - "privateIPAddress": "[parameters('HackMD_IP_Address')]", - "privateIPAllocationMethod": "Static", - "subnet": { - "id": "[variables('subnet')]" - }, - "primary": true, - "privateIPAddressVersion": "IPv4" - } - }], - "enableAcceleratedNetworking": false, - "enableIPForwarding": false, - "primary": true, - "tapConfigurations": [] - }, - "dependsOn": [] - } - ] -} \ No newline at end of file diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml index dea0d5940e..2aa600414c 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab-review.template.yaml @@ -2,11 +2,12 @@ package_update: true package_upgrade: true +# Install necessary apt packages packages: - - git - apt-transport-https - ca-certificates - curl + - git - gitlab-ce - gnupg - ldap-utils diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml index ae088882d8..00b102e3f1 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.template.yaml @@ -2,7 +2,7 @@ package_update: true package_upgrade: true -# Install LDAP tools for debugging LDAP issues +# Install necessary apt packages packages: - apt-transport-https - ca-certificates diff --git a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 
b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 index b977177b24..a21addc19b 100644 --- a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 +++ b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 @@ -90,8 +90,8 @@ try { $null = New-RDRemoteApp -ConnectionBroker "" -Alias "chrome (1)" -DisplayName "Code Review" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151" -CollectionName "Review" -ErrorAction Stop $null = New-RDRemoteApp -ConnectionBroker "" -Alias "mstsc (1)" -DisplayName "Desktop (DSVM Main)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "-v .160" -CollectionName "Applications" -ErrorAction Stop $null = New-RDRemoteApp -ConnectionBroker "" -Alias "mstsc (2)" -DisplayName "Desktop (DSVM Other)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CollectionName "Applications" -ErrorAction Stop - $null = New-RDRemoteApp -ConnectionBroker "" -Alias "chrome (2)" -DisplayName "GitLab" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151" -CollectionName "Applications" -ErrorAction Stop - $null = New-RDRemoteApp -ConnectionBroker "" -Alias "chrome (3)" -DisplayName "HackMD" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.152:3000" -CollectionName "Applications" -ErrorAction Stop + $null = New-RDRemoteApp -ConnectionBroker "" -Alias "chrome (2)" -DisplayName "GitLab" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151" -CollectionName 
"Applications" -ErrorAction Stop + $null = New-RDRemoteApp -ConnectionBroker "" -Alias "chrome (3)" -DisplayName "HackMD" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.152:3000" -CollectionName "Applications" -ErrorAction Stop $null = New-RDRemoteApp -ConnectionBroker "" -Alias "putty (1)" -DisplayName "SSH (DSVM Main)" -FilePath "C:\Program Files\PuTTY\putty.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "-ssh .160" -CollectionName "Applications" -ErrorAction Stop $null = New-RDRemoteApp -ConnectionBroker "" -Alias "putty (2)" -DisplayName "SSH (DSVM Other)" -FilePath "C:\Program Files\PuTTY\putty.exe" -ShowInWebAccess 1 -CollectionName "Applications" -ErrorAction Stop Write-Output " [o] Registering applications succeeded" diff --git a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 index 558097502e..71f2fced47 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 @@ -226,32 +226,33 @@ Add-LogMessage -Level Info "Upload RDS deployment scripts to storage..." # Expand deploy script $deployScriptLocalFilePath = (New-TemporaryFile).FullName $template = Join-Path $PSScriptRoot ".." "remote" "create_rds" "templates" "Deploy_RDS_Environment.template.ps1" | Get-Item | Get-Content -Raw -$template.Replace('',$airlockSubnetIpPrefix). - Replace('',$dataSubnetIpPrefix). - Replace('',$rdsGatewayVmFqdn). +$template.Replace('', $airlockSubnetIpPrefix). + Replace('', $dataSubnetIpPrefix). + Replace('', $dataSubnetIpPrefix). + Replace('', $rdsGatewayVmFqdn). Replace('', $rdsGatewayVmName). - Replace('',$rdsSh1VmFqdn). - Replace('',$rdsSh1VmName). - Replace('',$rdsSh2VmFqdn). - Replace('',$rdsSh2VmName). - Replace('',$rdsSh3VmFqdn). - Replace('',$rdsSh3VmName). 
- Replace('',$remoteUploadDir). - Replace('',$researchUserSgName). - Replace('',$reviewUserSgName). - Replace('',$shmDcAdminUsername). + Replace('', $rdsSh1VmFqdn). + Replace('', $rdsSh1VmName). + Replace('', $rdsSh2VmFqdn). + Replace('', $rdsSh2VmName). + Replace('', $rdsSh3VmFqdn). + Replace('', $rdsSh3VmName). + Replace('', $remoteUploadDir). + Replace('', $researchUserSgName). + Replace('', $reviewUserSgName). + Replace('', $shmDcAdminUsername). Replace('', $shmNetbiosName). - Replace('',$sreFqdn) | Out-File $deployScriptLocalFilePath + Replace('', $sreFqdn) | Out-File $deployScriptLocalFilePath # Expand server list XML $serverListLocalFilePath = (New-TemporaryFile).FullName $template = Join-Path $PSScriptRoot ".." "remote" "create_rds" "templates" "ServerList.template.xml" | Get-Item | Get-Content -Raw -$template.Replace('',$rdsGatewayVmFqdn). +$template.Replace('', $rdsGatewayVmFqdn). Replace('', $rdsGatewayVmName). - Replace('',$rdsSh1VmFqdn). - Replace('',$rdsSh2VmFqdn). - Replace('',$rdsSh3VmFqdn). - Replace('',$sreFqdn) | Out-File $serverListLocalFilePath + Replace('', $rdsSh1VmFqdn). + Replace('', $rdsSh2VmFqdn). + Replace('', $rdsSh3VmFqdn). 
+ Replace('', $sreFqdn) | Out-File $serverListLocalFilePath # Copy installers from SHM storage Add-LogMessage -Level Info "[ ] Copying RDS installers to storage account '$($sreStorageAccount.StorageAccountName)'" diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index dfc2b9f9cc..25692cf6fc 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -23,7 +23,6 @@ Add-LogMessage -Level Info "Creating/retrieving secrets from key vault '$($confi $sreAdminUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.Name -SecretName $config.sre.keyVault.secretNames.adminUsername -DefaultValue "sre$($config.sre.id)admin".ToLower() $sreAdminPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.webappAdminPassword $gitlabRootPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabRootPassword -$gitlabUserPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabUserPassword $gitlabLdapPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabLdapPassword $gitlabReviewUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabReviewUsername -DefaultValue "ingress" $gitlabReviewPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabReviewPassword @@ -55,53 +54,13 @@ Deploy-ArmTemplate -TemplatePath (Join-Path $PSScriptRoot ".." 
"arm_templates" " # Check that VNET and subnets exist # --------------------------------- $vnet = Deploy-VirtualNetwork -Name $config.sre.network.vnet.name -ResourceGroupName $config.sre.network.vnet.rg -AddressPrefix $config.sre.network.vnet.cidr -Location $config.sre.location -$subnetData = Deploy-Subnet -Name $config.sre.network.subnets.data.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.data.cidr $subnetAirlock = Deploy-Subnet -Name $config.sre.network.subnets.airlock.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.airlock.cidr +$subnetWebapps = Deploy-Subnet -Name $config.sre.network.subnets.data.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.data.cidr # NB. this is currently the SharedData subnet but will change soon -# Expand GitLab cloudinit -# ----------------------- -$shmDcFqdn = ($config.shm.dc.hostname + "." + $config.shm.domain.fqdn) -$gitlabFqdn = $config.sre.webapps.gitlab.hostname + "." + $config.sre.domain.fqdn -$gitlabLdapUserDn = "CN=" + $config.sre.users.ldap.gitlab.name + "," + $config.shm.domain.serviceOuPath -$gitlabUserFilter = "(&(objectClass=user)(memberOf=CN=" + $config.sre.domain.securityGroups.researchUsers.name + "," + $config.shm.domain.securityOuPath + "))" -$gitlabCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab.template.yaml" | Get-Item | Get-Content -Raw -$gitlabCloudInit = $gitlabCloudInitTemplate.Replace('', $shmDcFqdn). - Replace('', $gitlabLdapUserDn). - Replace('', $gitlabLdapPassword). - Replace('', $config.shm.domain.userOuPath). - Replace('', $gitlabUserFilter). - Replace('', $config.sre.webapps.gitlab.ip). - Replace('', $config.sre.webapps.gitlab.hostname). - Replace('', $gitlabFqdn). - Replace('', $gitlabRootPassword). - Replace('', $config.shm.domain.fqdn). - Replace('', $gitlabUsername). - Replace('', $gitlabPassword). 
- Replace('', $gitlabAPIToken) -# Encode as base64 -$gitlabCloudInitEncoded = [System.Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($gitlabCloudInit)) - - -# Expand HackMD cloudinit -# ----------------------- -$hackmdFqdn = $config.sre.webapps.hackmd.hostname + "." + $config.sre.domain.fqdn -$hackmdUserFilter = "(&(objectClass=user)(memberOf=CN=" + $config.sre.domain.securityGroups.researchUsers.name + "," + $config.shm.domain.securityOuPath + ")(userPrincipalName={{username}}))" -$hackmdLdapUserDn = "CN=" + $config.sre.users.ldap.hackmd.name + "," + $config.shm.domain.serviceOuPath -$hackMdLdapUrl = "ldap://" + $config.shm.dc.fqdn -$hackmdCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-hackmd.template.yaml" | Get-Item | Get-Content -Raw -$hackmdCloudInit = $hackmdCloudInitTemplate.Replace('', $hackmdLdapUserDn). - Replace('', $hackmdLdapPassword). - Replace('',$hackmdUserFilter). - Replace('', $config.shm.domain.userOuPath). - Replace('', $config.sre.webapps.hackmd.ip). - Replace('', $config.sre.webapps.hackmd.hostname). - Replace('', $hackmdFqdn). - Replace('', $hackMdLdapUrl). - Replace('', $config.shm.domain.netbiosName). - Replace('', $hackmdPostgresPassword) -# Encode as base64 -$hackmdCloudInitEncoded = [System.Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($hackmdCloudInit)) +# Attach NSGs to subnets +# ---------------------- +$subnetAirlock = Set-SubnetNetworkSecurityGroup -Subnet $subnetAirlock -VirtualNetwork $vnet -NetworkSecurityGroup $nsgAirlock # Create webapps resource group @@ -109,98 +68,128 @@ $hackmdCloudInitEncoded = [System.Convert]::ToBase64String([System.Text.Encoding $null = Deploy-ResourceGroup -Name $config.sre.webapps.rg -Location $config.sre.location -# Deploy GitLab/HackMD VMs from template +# Construct common deployment parameters # -------------------------------------- -Add-LogMessage -Level Info "Deploying GitLab/HackMD VMs from template..." 
-$params = @{ - Administrator_Password = (ConvertTo-SecureString $sreAdminPassword -AsPlainText -Force) - Administrator_User = $sreAdminUsername - BootDiagnostics_Account_Name = $config.sre.storage.bootdiagnostics.accountName - GitLab_Cloud_Init = $gitlabCloudInitEncoded - GitLab_IP_Address = $config.sre.webapps.gitlab.ip - GitLab_Server_Name = $config.sre.webapps.gitlab.vmName - GitLab_VM_Size = $config.sre.webapps.gitlab.vmSize - HackMD_Cloud_Init = $hackmdCloudInitEncoded - HackMD_IP_Address = $config.sre.webapps.hackmd.ip - HackMD_Server_Name = $config.sre.webapps.hackmd.vmName - HackMD_VM_Size = $config.sre.webapps.hackmd.vmSize - Virtual_Network_Name = $config.sre.network.vnet.name - Virtual_Network_Resource_Group = $config.sre.network.vnet.rg - Virtual_Network_Subnet = $config.sre.network.subnets.data.name +$commonDeploymentParams = @{ + AdminPassword = $sreAdminPassword + AdminUsername = $sreAdminUsername + BootDiagnosticsAccount = $(Deploy-StorageAccount -Name $config.sre.storage.bootdiagnostics.accountName -ResourceGroupName $config.sre.storage.bootdiagnostics.rg -Location $config.sre.location) + ImageSku = "18.04-LTS" + Location = $config.sre.location + OsDiskSizeGb = 64 + OsDiskType = "Standard_LRS" + ResourceGroupName = $config.sre.webapps.rg } -Deploy-ArmTemplate -TemplatePath (Join-Path $PSScriptRoot ".." "arm_templates" "sre-webapps-template.json") -Params $params -ResourceGroupName $config.sre.webapps.rg -# Poll VMs to see when they have finished running -# ----------------------------------------------- -Add-LogMessage -Level Info "Waiting for cloud-init provisioning to finish (this will take 5+ minutes)..." 
-$progress = 0 -$gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code -$hackmdStatuses = (Get-AzVM -Name $config.sre.webapps.hackmd.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code -while (-Not ($gitlabStatuses.Contains("ProvisioningState/succeeded") -and $gitlabStatuses.Contains("PowerState/stopped") -and - $hackmdStatuses.Contains("ProvisioningState/succeeded") -and $hackmdStatuses.Contains("PowerState/stopped"))) { - $progress = [math]::min(100, $progress + 1) - $gitlabStatuses = (Get-AzVM -Name $config.sre.webapps.gitlab.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code - $hackmdStatuses = (Get-AzVM -Name $config.sre.webapps.hackmd.vmName -ResourceGroupName $config.sre.webapps.rg -Status).Statuses.Code - Write-Progress -Activity "Deployment status:" -Status "GitLab [$($gitlabStatuses[0]) $($gitlabStatuses[1])], HackMD [$($hackmdStatuses[0]) $($hackmdStatuses[1])]" -PercentComplete $progress - Start-Sleep 10 +# Deploy GitLab +# ------------- +# Construct GitLab cloudinit +$gitlabCloudInit = (Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab.template.yaml" | Get-Item | Get-Content -Raw). + Replace('', "CN=$($config.sre.users.ldap.gitlab.name),$($config.shm.domain.serviceOuPath)"). + Replace('', $gitlabLdapPassword). + Replace('', $config.shm.domain.userOuPath). + Replace('', "(&(objectClass=user)(memberOf=CN=$($config.sre.domain.securityGroups.researchUsers.name),$($config.shm.domain.securityOuPath)))"). + Replace('', $config.sre.webapps.gitlab.ip). + Replace('', $config.sre.webapps.gitlab.hostname). + Replace('', "$($config.sre.webapps.gitlab.hostname).$($config.sre.domain.fqdn)"). + Replace('', $gitlabRootPassword). + Replace('', $config.shm.domain.fqdn). + Replace('', $gitlabUsername). + Replace('', $gitlabPassword). 
+ Replace('', $gitlabAPIToken) +# Set GitLab deployment parameters +$gitlabDataDisk = Deploy-ManagedDisk -Name "$($config.sre.webapps.gitlab.vmName)-DATA-DISK" -SizeGB 512 -Type "Standard_LRS" -ResourceGroupName $config.sre.webapps.rg -Location $config.sre.location +$gitlabDeploymentParams = @{ + CloudInitYaml = $gitlabCloudInit + DataDiskIds = @($gitlabDataDisk.Id) + Name = $config.sre.webapps.gitlab.vmName + PrivateIpAddress = $config.sre.webapps.gitlab.ip + Size = $config.sre.webapps.gitlab.vmSize + Subnet = $subnetWebapps } - - -# While webapp servers are off, ensure they are bound to correct NSG -# ------------------------------------------------------------------ -Add-LogMessage -Level Info "Ensure webapp servers and compute VMs are bound to correct NSG..." -foreach ($vmName in ($config.sre.webapps.hackmd.vmName, $config.sre.webapps.gitlab.vmName)) { - Add-VmToNSG -VMName $vmName -NSGName $nsgWebapps.Name +# Deploy GitLab VM +try { + Add-LogMessage -Level Warning "Temporarily allowing outbound internet access from $($config.sre.webapps.gitlab.ip)..." 
# Note that this has no effect at present + Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps -Name "TmpAllowOutboundInternetGitlab" -SourceAddressPrefix $config.sre.webapps.gitlab.ip -Access Allow -Description "Allow outbound internet" -DestinationAddressPrefix Internet -DestinationPortRange * -Direction Outbound -Priority 100 -Protocol * -SourcePortRange * + $null = Deploy-UbuntuVirtualMachine @gitlabDeploymentParams @commonDeploymentParams + Wait-ForAzVMCloudInit -Name $config.sre.webapps.gitlab.vmName -ResourceGroupName $config.sre.webapps.rg + Add-VmToNSG -VMName $config.sre.webapps.gitlab.vmName -NSGName $config.sre.webapps.nsg + Enable-AzVM -Name $config.sre.webapps.gitlab.vmName -ResourceGroupName $config.sre.webapps.rg +} finally { + $null = Remove-AzNetworkSecurityRuleConfig -Name "TmpAllowOutboundInternetGitlab" -NetworkSecurityGroup $nsgWebapps } -# Reboot the HackMD and Gitlab servers -# ---------------------------------- -foreach ($nameVMNameParamsPair in (("HackMD", $config.sre.webapps.hackmd.vmName), ("GitLab", $config.sre.webapps.gitlab.vmName))) { - $name, $vmName = $nameVMNameParamsPair - Add-LogMessage -Level Info "Rebooting the $name VM: '$vmName'" - Enable-AzVM -Name $vmName -ResourceGroupName $config.sre.webapps.rg - if ($?) { - Add-LogMessage -Level Success "Rebooting the $name VM ($vmName) succeeded" - } else { - Add-LogMessage -Level Fatal "Rebooting the $name VM ($vmName) failed!" - } +# Deploy HackMD +# ------------- +# Construct HackMD cloudinit +$hackmdCloudInit = (Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-hackmd.template.yaml" | Get-Item | Get-Content -Raw). + Replace('', "CN=$($config.sre.users.ldap.hackmd.name),$($config.shm.domain.serviceOuPath)"). + Replace('', $hackmdLdapPassword). + Replace('', "(&(objectClass=user)(memberOf=CN=$($config.sre.domain.securityGroups.researchUsers.name),$($config.shm.domain.securityOuPath))(userPrincipalName={{username}}))"). + Replace('', $config.shm.domain.userOuPath). 
+ Replace('', $config.sre.webapps.hackmd.ip). + Replace('', $config.sre.webapps.hackmd.hostname). + Replace('', "$($config.sre.webapps.hackmd.hostname).$($config.sre.domain.fqdn)"). + Replace('', "ldap://$($config.shm.dc.fqdn)"). + Replace('', $config.shm.domain.netbiosName). + Replace('', $hackmdPostgresPassword) +# Set HackMD deployment parameters +$hackmdDataDisk = Deploy-ManagedDisk -Name "$($config.sre.webapps.hackmd.vmName)-DATA-DISK" -SizeGB 512 -Type "Standard_LRS" -ResourceGroupName $config.sre.webapps.rg -Location $config.sre.location +$hackmdDeploymentParams = @{ + CloudInitYaml = $hackmdCloudInit + DataDiskIds = @($hackmdDataDisk.Id) + Name = $config.sre.webapps.hackmd.vmName + PrivateIpAddress = $config.sre.webapps.hackmd.ip + Size = $config.sre.webapps.hackmd.vmSize + Subnet = $subnetWebapps +} +# Deploy HackMD VM +try { + Add-LogMessage -Level Warning "Temporarily allowing outbound internet access from $($config.sre.webapps.hackmd.ip)..." # Note that this has no effect at present + Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps -Name "TmpAllowOutboundInternetHackMD" -SourceAddressPrefix $config.sre.webapps.hackmd.ip -Access Allow -Description "Allow outbound internet" -DestinationAddressPrefix Internet -DestinationPortRange * -Direction Outbound -Priority 100 -Protocol * -SourcePortRange * + $null = Deploy-UbuntuVirtualMachine @hackmdDeploymentParams @commonDeploymentParams + Wait-ForAzVMCloudInit -Name $config.sre.webapps.hackmd.vmName -ResourceGroupName $config.sre.webapps.rg + Add-VmToNSG -VMName $config.sre.webapps.hackmd.vmName -NSGName $config.sre.webapps.nsg + Enable-AzVM -Name $config.sre.webapps.hackmd.vmName -ResourceGroupName $config.sre.webapps.rg +} finally { + $null = Remove-AzNetworkSecurityRuleConfig -Name "TmpAllowOutboundInternetHackMD" -NetworkSecurityGroup $nsgWebapps } -# Get public SSH keys from the GitLab server -# This allows it to be added as a known host on the GitLab review server -# 
---------------------------------------------------------------------- -$script = ' -#! /bin/bash -echo " $(cat /etc/ssh/ssh_host_rsa_key.pub | cut -d " " -f -2)" -echo " $(cat /etc/ssh/ssh_host_ed25519_key.pub | cut -d " " -f -2)" -echo " $(cat /etc/ssh/ssh_host_ecdsa_key.pub | cut -d " " -f -2)" -'.Replace('', $config.sre.webapps.gitlab.ip) +# Deploy GitLab review +# -------------------- +# Get public SSH keys from the GitLab server, allowing it to be added as a known host on the GitLab review server Add-LogMessage -Level Info "Fetching ssh keys from gitlab..." +$script = @" +#! /bin/bash +echo " $(cut -d ' ' -f -2 /etc/ssh/ssh_host_rsa_key.pub)" +echo " $(cut -d ' ' -f -2 /etc/ssh/ssh_host_ed25519_key.pub)" +echo " $(cut -d ' ' -f -2 /etc/ssh/ssh_host_ecdsa_key.pub)" +"@.Replace("", $config.sre.webapps.gitlab.ip) $result = Invoke-RemoteScript -VMName $config.sre.webapps.gitlab.vmName -ResourceGroupName $config.sre.webapps.rg -Shell "UnixShell" -Script $script Add-LogMessage -Level Success "Fetching ssh keys from gitlab succeeded" -# Extract everything in between the [stdout] and [stderr] blocks of the result message. i.e. all output of the script. 
-$sshKeys = $result.Value[0].Message | Select-String "\[stdout\]\s*([\s\S]*?)\s*\[stderr\]" -# $sshKeys = $sshKeys.Matches.Groups[1].Value - - -# Deploy NIC and data disks for GitLab review server -# -------------------------------------------------- -$bootDiagnosticsAccount = Deploy-StorageAccount -Name $config.sre.storage.bootdiagnostics.accountName -ResourceGroupName $config.sre.storage.bootdiagnostics.rg -Location $config.sre.location -$vmNameReview = $config.sre.webapps.gitlabreview.vmName -$vmIpAddress = $config.sre.webapps.gitlabreview.ip -$vmNic = Deploy-VirtualMachineNIC -Name "$vmNameReview-NIC" -ResourceGroupName $config.sre.webapps.rg -Subnet $subnetAirlock -PrivateIpAddress $vmIpAddress -Location $config.sre.location - - -# Expand GitLab review cloudinit -# ------------------------------ -$gitlabReviewCloudInitTemplate = Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab-review.template.yaml" | Get-Item | Get-Content -Raw -$gitlabReviewFqdn = $config.sre.webapps.gitlabreview.hostname + "." + $config.sre.domain.fqdn -$gitlabReviewLdapUserDn = "CN=" + $config.sre.users.ldap.gitlab.name + "," + $config.shm.domain.serviceOuPath -$gitlabReviewUserFilter = "(&(objectClass=user)(memberOf=CN=" + $config.sre.domain.securityGroups.reviewUsers.name + "," + $config.shm.domain.securityOuPath + "))" - +$sshKeys = $result.Value[0].Message | Select-String "\[stdout\]\s*([\s\S]*?)\s*\[stderr\]" # Extract everything in between the [stdout] and [stderr] blocks of the result message. i.e. all output of the script. +# Construct GitLab review cloudinit +$gitlabReviewCloudInit = (Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab-review.template.yaml" | Get-Item | Get-Content -Raw). + Replace('', $sreAdminUsername). + Replace('', $config.sre.webapps.gitlab.ip). + Replace('', $gitlabUsername). + Replace('', $gitlabAPIToken). + Replace('', "$($config.shm.dc.hostname).$($config.shm.domain.fqdn)"). 
+ Replace('', "CN=$($config.sre.users.ldap.gitlab.name),$($config.shm.domain.serviceOuPath)"). + Replace('', $gitlabLdapPassword). + Replace('', $config.shm.domain.userOuPath). + Replace('', "(&(objectClass=user)(memberOf=CN=$($config.sre.domain.securityGroups.reviewUsers.name),$($config.shm.domain.securityOuPath)))"). + Replace('', $config.sre.webapps.gitlabreview.ip). + Replace('', $config.sre.webapps.gitlabreview.hostname). + Replace('', "$($config.sre.webapps.gitlabreview.hostname).$($config.sre.domain.fqdn)"). + Replace('', $gitlabRootPassword). + Replace('', $config.shm.domain.fqdn). + Replace('', $gitlabReviewUsername). + Replace('', $gitlabReviewPassword). + Replace('', $gitlabReviewAPIToken) # Insert SSH keys and scripts into cloud init template, maintaining indentation $indent = " " foreach ($scriptName in @("zipfile_to_gitlab_project.py", @@ -213,45 +202,26 @@ foreach ($scriptName in @("zipfile_to_gitlab_project.py", $raw_script = Get-Content (Join-Path $PSScriptRoot ".." "cloud_init" "scripts" $scriptName) -Raw } $indented_script = $raw_script -split "`n" | ForEach-Object { "${indent}$_" } | Join-String -Separator "`n" - $gitlabReviewCloudInitTemplate = $gitlabReviewCloudInitTemplate.Replace("${indent}<$scriptName>", $indented_script) + $gitlabReviewCloudInit = $gitlabReviewCloudInit.Replace("${indent}<$scriptName>", $indented_script) } - -# Insert other variables into template -$gitlabReviewCloudInit = $gitlabReviewCloudInitTemplate.Replace('', $sreAdminUsername). - Replace('', $config.sre.webapps.gitlab.ip). - Replace('', $gitlabUsername). - Replace('', $gitlabAPIToken). - Replace('', $shmDcFqdn). - Replace('', $gitlabReviewLdapUserDn). - Replace('', $gitlabLdapPassword). - Replace('', $config.shm.domain.userOuPath). - Replace('', $gitlabReviewUserFilter). - Replace('', $config.sre.webapps.gitlabreview.ip). - Replace('', $config.sre.webapps.gitlabreview.hostname). - Replace('', $gitlabReviewFqdn). - Replace('', $gitlabRootPassword). 
- Replace('', $config.shm.domain.fqdn). - Replace('', $gitlabReviewUsername). - Replace('', $gitlabReviewPassword). - Replace('', $gitlabReviewAPIToken) -# Deploy VM and add to correct NSG when done -$params = @{ - Name = $vmNameReview - Size = $config.sre.webapps.gitlabreview.vmSize - AdminPassword = $sreAdminPassword - AdminUsername = $sreAdminUsername - BootDiagnosticsAccount = $bootDiagnosticsAccount +# Set GitLab review deployment parameters +$gitlabReviewDeploymentParams = @{ CloudInitYaml = $gitlabReviewCloudInit - location = $config.sre.location - NicId = $vmNic.Id - OsDiskType = "Standard_LRS" - ResourceGroupName = $config.sre.webapps.rg - ImageSku = "18.04-LTS" + Name = $config.sre.webapps.gitlabreview.vmName + PrivateIpAddress = $config.sre.webapps.gitlabreview.ip + Size = $config.sre.webapps.gitlabreview.vmSize + Subnet = $subnetAirlock +} +# Deploy GitLab review VM +try { + Add-LogMessage -Level Warning "Temporarily allowing outbound internet access from $($config.sre.webapps.gitlabreview.ip)..." 
+ Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock -Name "TmpAllowOutboundInternetGitlabReview" -SourceAddressPrefix $config.sre.webapps.gitlabreview.ip -Access Allow -Description "Allow outbound internet" -DestinationAddressPrefix Internet -DestinationPortRange * -Direction Outbound -Priority 100 -Protocol * -SourcePortRange * + $null = Deploy-UbuntuVirtualMachine @gitlabReviewDeploymentParams @commonDeploymentParams + Wait-ForAzVMCloudInit -Name $config.sre.webapps.gitlabreview.vmName -ResourceGroupName $config.sre.webapps.rg + Enable-AzVM -Name $config.sre.webapps.gitlabreview.vmName -ResourceGroupName $config.sre.webapps.rg +} finally { + $null = Remove-AzNetworkSecurityRuleConfig -Name "TmpAllowOutboundInternetGitlabReview" -NetworkSecurityGroup $nsgAirlock } -$null = Deploy-UbuntuVirtualMachine @params -Wait-ForAzVMCloudInit -Name $vmNameReview -ResourceGroupName $config.sre.webapps.rg -Add-VmToNSG -VMName $vmNameReview -NSGName $nsgAirlock -Enable-AzVM -Name $vmNameReview -ResourceGroupName $config.sre.webapps.rg # List VMs connected to each NSG @@ -264,4 +234,4 @@ foreach ($nsg in @($nsgWebapps, $nsgAirlock)) { # Switch back to original subscription # ------------------------------------ -$null = Set-AzContext -Context $originalContext; +$null = Set-AzContext -Context $originalContext From a845ba79935fa8ab43c8c7cb3f237869fb49c16c Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Tue, 30 Jun 2020 13:58:49 +0100 Subject: [PATCH 128/155] Reflect Azure portal change in deployment (Point-to-Site -> User VPN) --- docs/deploy_sre_instructions.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/deploy_sre_instructions.md b/docs/deploy_sre_instructions.md index e365c90792..6c03dd5d07 100644 --- a/docs/deploy_sre_instructions.md +++ b/docs/deploy_sre_instructions.md @@ -75,7 +75,7 @@ This is done using the VPN which should have been deployed when setting up the S - This certificate will also allow you to connect via VPN to the 
SRE VNets once deployed. - **Configure the VPN connection** - - Navigate to the Safe Haven Management (SHM) VNet gateway in the SHM subscription via `Resource Groups -> RG_SHM_NETWORKING -> VNET_SHM__GW`, where `` is defined in the config file. Once there open the "Point-to-site configuration page under the `Settings` section in the left hand sidebar (see image below). + - Navigate to the Safe Haven Management (SHM) VNet gateway in the SHM subscription via `Resource Groups -> RG_SHM_NETWORKING -> VNET_SHM__GW`, where `` is defined in the config file. Once there open the "User VPN" configuration page under the `Settings` section in the left hand sidebar (see image below).

From 71774f833c1034678c9c7d3734d5decf4d055f17 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Tue, 30 Jun 2020 15:08:15 +0100 Subject: [PATCH 129/155] Allow (not deny) outbound access to GitLab Internal from Gitlab-Review --- .../setup/Setup_SRE_WebApp_Servers.ps1 | 1 - 1 file changed, 1 deletion(-) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 25692cf6fc..d5e93860ca 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -50,7 +50,6 @@ $params = @{ } Deploy-ArmTemplate -TemplatePath (Join-Path $PSScriptRoot ".." "arm_templates" "sre-nsg-rules-template.json") -Params $params -ResourceGroupName $config.sre.network.vnet.rg - # Check that VNET and subnets exist # --------------------------------- $vnet = Deploy-VirtualNetwork -Name $config.sre.network.vnet.name -ResourceGroupName $config.sre.network.vnet.rg -AddressPrefix $config.sre.network.vnet.cidr -Location $config.sre.location From 32a4de54e06b0056573488875abd06c59bae3fa9 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 2 Jul 2020 08:42:27 +0100 Subject: [PATCH 130/155] Clarify doc comment --- .../cloud_init/scripts/zipfile_to_gitlab_project.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 840b43c20e..d74691aff6 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -384,8 +384,9 @@ def clone_commit_and_push( branch_name: str, the name of the branch holding the snapshot target_branch_name: str, the name of the branch to 
push to remote_url: str, the URL for this project on gitlab-review to be added - as a remote. - target_project_url: str, the url of the original upstream project + as a remote ("unapproved"). + target_project_url: str, the url of the original imported project on + gitlab-review ("approved") commit_hash: str, the commit hash of the snapshot of the upstream project """ From ccab24d7354f587a9344254a553f6d99b72a6f10 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Thu, 2 Jul 2020 17:39:21 +0100 Subject: [PATCH 131/155] Style changes for SRE_Upload_Git_Repo_to_GitlabReview --- .../SRE_Upload_Git_Repo_to_GitlabReview.ps1 | 132 +++++++++--------- 1 file changed, 66 insertions(+), 66 deletions(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 index 3c34574ba3..4e5af86300 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 @@ -1,15 +1,14 @@ param( [Parameter(Mandatory = $true, HelpMessage = "Enter SRE ID (usually a number e.g enter '9' for DSG9)")] [string]$sreId, - [Parameter( Mandatory = $true, HelpMessage = "Enter the git URL of the source repository")] + [Parameter(Mandatory = $true, HelpMessage = "Enter the git URL of the source repository")] [string]$sourceGitURL, - ## interpret the basename of the final path segment in a (possibly encoded) URI as the name of the repository - [Parameter( Mandatory = $false, HelpMessage = "Enter the name of the repository as it should appear within SRE GITLAB (default is the basename of the final path segment of the git URL)")] - [string]$targetRepoName = [uri]::UnescapeDataString((Split-Path -Path ([uri]$sourceGitURL).Segments[-1] -LeafBase)), - [Parameter( Mandatory = $true, HelpMessage = "Enter the full commit hash of the commit in the source repository to snapshot")] + [Parameter(Mandatory = $true, HelpMessage = "Enter the full 
commit hash of the commit in the source repository to snapshot")] [string]$sourceCommitHash, - [Parameter( Mandatory = $true, HelpMessage = "Enter the desired branch name where the snapshot should be placed (in the repository inside SRE GITLAB)")] - [string]$targetBranchName + [Parameter(Mandatory = $true, HelpMessage = "Enter the desired branch name where the snapshot should be placed (in the repository inside SRE GITLAB)")] + [string]$targetBranchName, + [Parameter(Mandatory = $false, HelpMessage = "Enter the name of the repository as it should appear within SRE GITLAB (default is the basename of the final path segment of the git URL)")] + [string]$targetRepoName ) Import-Module Az @@ -19,16 +18,25 @@ Import-Module $PSScriptRoot/../common/Logging.psm1 -Force Import-Module $PSScriptRoot/../common/Deployments.psm1 -Force Import-Module $PSScriptRoot/../common/GenerateSasToken.psm1 -Force + +# If no target repo name is provided then interpret the basename of the final path segment in a (possibly encoded) URI as the name of the repository +# -------------------------------------------------------------------------------------------------------------------------------------------------- +if (-not $targetRepoName) { + $targetRepoName = [uri]::UnescapeDataString((Split-Path -Path ([uri]$sourceGitURL).Segments[-1] -LeafBase)) +} + + # Get config and original context before changing subscription # ------------------------------------------------------------ $config = Get-SreConfig $sreId $originalContext = Get-AzContext -$_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName +$null = Set-AzContext -SubscriptionId $config.sre.subscriptionName +$workingDir = Get-Location + # Create local zip file # --------------------- - -# The zipfile is called "repo.zip", with the following contents: +# The zipfile is called 'repo.zip', with the following contents: # # repo/ # sourceGitURL @@ -37,29 +45,20 @@ $_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName # 
targetBranchName # snapshot/ # ... repository contents - - -Add-LogMessage -Level Info "Creating zipfilepath." -## $zipFileName = "${targetRepoName}_${sourceCommitHash}_${targetBranchName}.zip" $zipFileName = "repo.zip" +Add-LogMessage -Level Info "[ ] Creating local zip file '$zipFileName' using $sourceCommitHash from $sourceGitURL" -$tempDir = New-Item -ItemType Directory -Path (Join-Path ([System.IO.Path]::GetTempPath()) ([System.IO.Path]::GetRandomFileName())) - -$repoPath = Join-Path $tempDir "repo" -New-Item -ItemType Directory $repoPath - -## -$workingDir = Get-Location -Set-Location $repoPath - -Add-LogMessage -Level Info "About to git clone " -git clone $sourceGitURL snapshot - -Set-Location "snapshot" +# Create temporary directory and switch to it +$basePath = New-Item -ItemType Directory -Path (Join-Path ([System.IO.Path]::GetTempPath()) ([System.IO.Path]::GetRandomFileName())) +$repoPath = Join-Path $basePath "repo" +New-Item -ItemType Directory -Path $repoPath +# Checkout the repository and strip out its git history +$snapshotPath = Join-Path $repoPath "snapshot" +git clone $sourceGitURL $snapshotPath +Set-Location $snapshotPath git checkout $sourceCommitHash -# Remove the .git directory -Remove-Item -Path ".git" -Recurse -Force +Remove-Item -Path (Join-Path $snapshotPath ".git") -Recurse -Force ## Record some metadata about the repository Set-Location $repoPath @@ -69,69 +68,70 @@ $sourceCommitHash > sourceCommitHash $targetBranchName > targetBranchName # Zip contents and meta -Set-Location $tempDir - -$zipFilePath = Join-Path $tempDir $zipFileName +Set-Location $basePath +$zipFilePath = Join-Path $basePath $zipFileName Compress-Archive -CompressionLevel NoCompression -Path $repoPath -DestinationPath $zipFilePath if ($?) { - Add-LogMessage -Level Success "Zip file creation succeeded! $zipFilePath" + Add-LogMessage -Level Success "Successfully created zip file at $zipFilePath" } else { - Add-LogMessage -Level Fatal "Zip file creation failed!" 
+ Add-LogMessage -Level Fatal "Failed to create zip file at $zipFilePath!" } Set-Location $workingDir -# Upload the zip file to the VM, via blob storage -# ----------------------------------------------- - -$gitlabReviewVmName = $config.sre.webapps.gitlabreview.vmName -# Go via blob storage - first create storage account if not already there +# Upload the zipfile to blob storage +# ---------------------------------- +$tmpContainerName = $config.sre.storage.artifacts.containers.gitlabAirlockName + "-" + [Guid]::NewGuid().ToString() +Add-LogMessage -Level Info "[ ] Uploading zipfile to container '$tmpContainerName'..." $storageResourceGroupName = $config.sre.storage.artifacts.rg -$sreStorageAccountName = $config.sre.storage.artifacts.accountName -$sreStorageAccount = Deploy-StorageAccount -Name $sreStorageAccountName -ResourceGroupName $storageResourceGroupName -Location $config.sre.location - -# Create a temporary storage container -$containerName = $config.sre.storage.artifacts.containers.gitlabAirlockName + "-" + [Guid]::NewGuid().ToString() - -# Ensure an empty storage container of the given name exists -$_ = Deploy-StorageContainer -Name $containerName -StorageAccount $sreStorageAccount - -# copy zipfile to blob storage -# ---------------------------- -Add-LogMessage -Level Info "Upload zipfile to storage..." +$sreStorageAccount = Deploy-StorageAccount -Name $config.sre.storage.artifacts.accountName -ResourceGroupName $storageResourceGroupName -Location $config.sre.location +$null = Deploy-StorageContainer -Name $tmpContainerName -StorageAccount $sreStorageAccount +$null = Set-AzStorageBlobContent -Container $tmpContainerName -Context $sreStorageAccount.Context -File $zipFilePath -Blob $zipFileName -Force +if ($?) { + Add-LogMessage -Level Success "Successfully uploaded zip file to '$tmpContainerName'" +} else { + Add-LogMessage -Level Fatal "Failed to upload zip file to '$tmpContainerName'!" 
+} -Set-AzStorageBlobContent -Container $containerName -Context $sreStorageAccount.Context -File $zipFilePath -Blob $zipFileName -Force -# Download zipfile onto the remote machine -# ---------------------------------------- -# Get a SAS token and construct URL +# Generate a SAS token and construct URL +# -------------------------------------- +Add-LogMessage -Level Info "[ ] Generating SAS token..." $sasToken = New-ReadOnlyAccountSasToken -ResourceGroup $storageResourceGroupName -AccountName $sreStorageAccount.StorageAccountName -SubscriptionName $config.sre.subscriptionName $remoteUrl = "https://$($sreStorageAccount.StorageAccountName).blob.core.windows.net/${containerName}/${zipFileName}${sasToken}" -Add-LogMessage -Level Info "Got SAS token and URL $remoteUrl" +Add-LogMessage -Level Success "Constructed upload URL $remoteUrl" -$sreAdminUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.Name -SecretName $config.sre.keyVault.secretNames.adminUsername -DefaultValue "sre$($config.sre.id)admin".ToLower() +# Download the zipfile onto the remote machine +# -------------------------------------------- +$sreAdminUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.Name -SecretName $config.sre.keyVault.secretNames.adminUsername -DefaultValue "sre$($config.sre.id)admin".ToLower() # Create remote script (make a subdirectory of /tmp/zipfiles and run CURL to download blob to there) $script = @" #!/bin/bash mkdir -p /tmp/zipfiles/ tmpdir=`$(mktemp -d /tmp/zipfiles/XXXXXXXXXXXXXXXXXXXX) curl -X GET -o `$tmpdir/${zipFileName} "${remoteUrl}" - chown -R ${sreAdminUsername}:${sreAdminUsername} /tmp/zipfiles/ "@ +Add-LogMessage -Level Info "[ ] Running remote script to download zipfile onto $($config.sre.webapps.gitlabreview.vmName)" +$result = Invoke-RemoteScript -Shell "UnixShell" -Script $script -VMName $config.sre.webapps.gitlabreview.vmName -ResourceGroupName $config.sre.webapps.rg +Write-Output $result.Value -$resourceGroupName = 
$config.sre.webapps.rg -Add-LogMessage -Level Info "[ ] Running remote script to download zipfile onto $gitlabReviewVmName" -$result = Invoke-RemoteScript -Shell "UnixShell" -Script $script -VMName $gitlabReviewVmName -ResourceGroupName $resourceGroupName -# clean up - remove the zipfile from local machine. -Add-LogMessage -Level Info "[ ] Removing original zipfile $zipFilePath" +# Clean up zipfile and blob storage container +# ------------------------------------------- +Add-LogMessage -Level Info "[ ] Cleaning up zipfile and blob storage container..." Remove-Item -Path $zipFilePath +$success = $? +Remove-AzStorageContainer -Name $tmpContainerName +$success = $success -and $? +if ($success) { + Add-LogMessage -Level Success "Successfully cleaned up resources" +} else { + Add-LogMessage -Level Fatal "Failed to clean up resources!" +} -# Remove the temporary storage container -Remove-AzStorageContainer -Name $containerName # Switch back to original subscription # ------------------------------------ -$_ = Set-AzContext -Context $originalContext +$null = Set-AzContext -Context $originalContext From f8ee363878a1ff5ad1d43a028102c2fd03aaa498 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 3 Jul 2020 14:33:41 +0100 Subject: [PATCH 132/155] Fix removing storage container: needs context; don't prompt user --- .../administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 | 2 ++ 1 file changed, 2 insertions(+) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 index 4e5af86300..d2ba78b381 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 @@ -131,6 +131,8 @@ if ($success) { Add-LogMessage -Level Fatal "Failed to clean up resources!" 
} +# Remove the temporary storage container +Remove-AzStorageContainer -Name $containerName -Context $sreStorageAccount.Context -Confirm $false # Switch back to original subscription # ------------------------------------ From 67baafa6e71421ce099dcec9cc180305d1369e82 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 3 Jul 2020 14:36:46 +0100 Subject: [PATCH 133/155] Add missing gitlab-rb-host template substitution (fixes inability to find LDAP users) --- .../setup/Setup_SRE_WebApp_Servers.ps1 | 1 + 1 file changed, 1 insertion(+) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index d5e93860ca..2cda94f615 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -89,6 +89,7 @@ $gitlabCloudInit = (Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitla Replace('', $gitlabLdapPassword). Replace('', $config.shm.domain.userOuPath). Replace('', "(&(objectClass=user)(memberOf=CN=$($config.sre.domain.securityGroups.researchUsers.name),$($config.shm.domain.securityOuPath)))"). + Replace('', "$($config.shm.dc.hostname).$($config.shm.domain.fqdn)"). Replace('', $config.sre.webapps.gitlab.ip). Replace('', $config.sre.webapps.gitlab.hostname). Replace('', "$($config.sre.webapps.gitlab.hostname).$($config.sre.domain.fqdn)"). 
From cae41f39c19e2ea625f222120f6284e0f3735801 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 3 Jul 2020 14:50:45 +0100 Subject: [PATCH 134/155] containerName -> tmpContainerName --- .../administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 index d2ba78b381..072795c60f 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 @@ -98,7 +98,7 @@ if ($?) { # -------------------------------------- Add-LogMessage -Level Info "[ ] Generating SAS token..." $sasToken = New-ReadOnlyAccountSasToken -ResourceGroup $storageResourceGroupName -AccountName $sreStorageAccount.StorageAccountName -SubscriptionName $config.sre.subscriptionName -$remoteUrl = "https://$($sreStorageAccount.StorageAccountName).blob.core.windows.net/${containerName}/${zipFileName}${sasToken}" +$remoteUrl = "https://$($sreStorageAccount.StorageAccountName).blob.core.windows.net/${tmpContainerName}/${zipFileName}${sasToken}" Add-LogMessage -Level Success "Constructed upload URL $remoteUrl" @@ -132,7 +132,7 @@ if ($success) { } # Remove the temporary storage container -Remove-AzStorageContainer -Name $containerName -Context $sreStorageAccount.Context -Confirm $false +Remove-AzStorageContainer -Name $tmpContainerName -Context $sreStorageAccount.Context -Confirm $false # Switch back to original subscription # ------------------------------------ From 7b844c3f6125c9491449e8c92f83eccc1d3cda72 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 3 Jul 2020 14:58:37 +0100 Subject: [PATCH 135/155] Fix cleaning up resources --- .../administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git 
a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 index 072795c60f..bff71bd0a6 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 @@ -123,7 +123,7 @@ Write-Output $result.Value Add-LogMessage -Level Info "[ ] Cleaning up zipfile and blob storage container..." Remove-Item -Path $zipFilePath $success = $? -Remove-AzStorageContainer -Name $tmpContainerName +Remove-AzStorageContainer -Name $tmpContainerName -Context $sreStorageAccount.Context -Confirm $false $success = $success -and $? if ($success) { Add-LogMessage -Level Success "Successfully cleaned up resources" @@ -131,9 +131,6 @@ if ($success) { Add-LogMessage -Level Fatal "Failed to clean up resources!" } -# Remove the temporary storage container -Remove-AzStorageContainer -Name $tmpContainerName -Context $sreStorageAccount.Context -Confirm $false - # Switch back to original subscription # ------------------------------------ $null = Set-AzContext -Context $originalContext From 815f2f024e37a900546da8c25c0724e36cc928a3 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 3 Jul 2020 15:02:39 +0100 Subject: [PATCH 136/155] Fix flag to Remove-AzStorageContainer --- .../administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 index bff71bd0a6..226b0ddb2b 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 @@ -123,7 +123,7 @@ Write-Output $result.Value Add-LogMessage -Level Info "[ ] Cleaning up zipfile and blob storage container..." Remove-Item -Path $zipFilePath $success = $? 
-Remove-AzStorageContainer -Name $tmpContainerName -Context $sreStorageAccount.Context -Confirm $false +Remove-AzStorageContainer -Name $tmpContainerName -Context $sreStorageAccount.Context -Force $success = $success -and $? if ($success) { Add-LogMessage -Level Success "Successfully cleaned up resources" From 0018a6963ebd543aab2c53c53c8b429a268a51b5 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Mon, 6 Jul 2020 12:47:08 +0100 Subject: [PATCH 137/155] Minor fix to blob storage cleaner --- deployment/common/Deployments.psm1 | 23 ++++++++++--------- .../setup/Setup_SRE_VNET_RDS.ps1 | 2 +- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/deployment/common/Deployments.psm1 b/deployment/common/Deployments.psm1 index c25727311d..9d81d0e363 100644 --- a/deployment/common/Deployments.psm1 +++ b/deployment/common/Deployments.psm1 @@ -107,31 +107,32 @@ Export-ModuleMember -Function Add-VmToNSG # Ensure the specified storage container is empty # ----------------------------------------------- -function Clear-StorageContainer { +function Clear-StorageContainerBlobs { param( [Parameter(Mandatory = $true, HelpMessage = "Name of storage container to clear")] $Name, [Parameter(Mandatory = $true, HelpMessage = "Name of storage account where the container exists")] $StorageAccount ) - # delete existing blobs in the container + # Delete existing blobs in the container $blobs = @(Get-AzStorageBlob -Container $Name -Context $StorageAccount.Context) - $numBlobs = $blobs.Length - if ($numBlobs -gt 0) { - Add-LogMessage -Level Info "[ ] deleting $numBlobs blobs aready in container '$Name'..." + if ($blobs.Length -gt 0) { + Add-LogMessage -Level Info "[ ] Deleting $numBlobs blobs already in container '$Name'..." 
$blobs | ForEach-Object { Remove-AzStorageBlob -Blob $_.Name -Container $Name -Context $StorageAccount.Context -Force } - while ($numBlobs -gt 0) { - Start-Sleep -Seconds 5 - $numBlobs = (Get-AzStorageBlob -Container $Name -Context $StorageAccount.Context).Length + while ((Get-AzStorageBlob -Container $Name -Context $StorageAccount.Context).Length) { + Start-Sleep 5 } if ($?) { - Add-LogMessage -Level Success "Blob deletion succeeded" + Add-LogMessage -Level Success "Removing blobs from $Name' succeeded" } else { - Add-LogMessage -Level Fatal "Blob deletion failed!" + Add-LogMessage -Level Fatal "Removing blobs from $Name' failed!" } + } else { + Add-LogMessage -Level InfoSuccess "Container '$Name' was already empty of blobs" } + } -Export-ModuleMember -Function Clear-StorageContainer +Export-ModuleMember -Function Clear-StorageContainerBlobs # Deploy an ARM template and log the output diff --git a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 index 41583188e9..158731cb51 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 @@ -230,7 +230,7 @@ Deploy-ArmTemplate -TemplatePath (Join-Path $PSScriptRoot ".." "arm_templates" " Add-LogMessage -Level Info "Creating blob storage containers in storage account '$($sreStorageAccount.StorageAccountName)'..." foreach ($containerName in ($containerNameGateway, $containerNameSessionHosts)) { $null = Deploy-StorageContainer -Name $containerName -StorageAccount $sreStorageAccount - Clear-StorageContainer -Name $containerName -StorageAccount $sreStorageAccount + $null = Clear-StorageContainerBlobs -Name $containerName -StorageAccount $sreStorageAccount } From 243980d2d97bbb413b3a4ca4f1243dbf5575edc6 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Mon, 6 Jul 2020 17:49:53 +0100 Subject: [PATCH 138/155] RDS fixes from redeploying. 
Includes a fix to make Deploy_RDS_Environment.template.ps1 idempotent by removing CAP/RAP settings --- deployment/common/Configuration.psm1 | 2 +- .../arm_templates/sre-rds-template.json | 4 +- .../Deploy_RDS_Environment.template.ps1 | 11 ++++- .../templates/ServerList.template.xml | 2 +- .../setup/Setup_SRE_VNET_RDS.ps1 | 43 +++++++++---------- .../setup/Update_SRE_RDS_SSL_Certificate.ps1 | 3 +- .../full/sre_testasandbox_full_config.json | 2 +- 7 files changed, 38 insertions(+), 29 deletions(-) diff --git a/deployment/common/Configuration.psm1 b/deployment/common/Configuration.psm1 index 050d9e79dd..eec3a6fa73 100644 --- a/deployment/common/Configuration.psm1 +++ b/deployment/common/Configuration.psm1 @@ -51,7 +51,7 @@ function Add-SreConfig { dataAdministrators = [ordered]@{ name = "SG $($config.sre.domain.netbiosName) Data Administrators" } systemAdministrators = [ordered]@{ name = "SG $($config.sre.domain.netbiosName) System Administrators" } researchUsers = [ordered]@{ name = "SG $($config.sre.domain.netbiosName) Research Users" } - reviewUsersGroup = [ordered]@{ name = "SG $($config.sre.domain.netbiosName) Review Users" } + reviewUsers = [ordered]@{ name = "SG $($config.sre.domain.netbiosName) Review Users" } } foreach ($groupName in $config.sre.domain.securityGroups.Keys) { $config.sre.domain.securityGroups[$groupName].description = $config.sre.domain.securityGroups[$groupName].name diff --git a/deployment/secure_research_environment/arm_templates/sre-rds-template.json b/deployment/secure_research_environment/arm_templates/sre-rds-template.json index 10c53de3bb..504a8a3cc2 100644 --- a/deployment/secure_research_environment/arm_templates/sre-rds-template.json +++ b/deployment/secure_research_environment/arm_templates/sre-rds-template.json @@ -494,7 +494,7 @@ }, "networkProfile": { "networkInterfaces": [{ - "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('rdssh2nic'))]", + "id": "[resourceId('Microsoft.Network/networkInterfaces', 
variables('rdssh3nic'))]", "properties": { "primary": true } @@ -508,7 +508,7 @@ } }, "dependsOn": [ - "[resourceId('Microsoft.Network/networkInterfaces', variables('rdssh2nic'))]" + "[resourceId('Microsoft.Network/networkInterfaces', variables('rdssh3nic'))]" ] }, { diff --git a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 index 334a7d2fbe..0cdf4eefbe 100644 --- a/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 +++ b/deployment/secure_research_environment/remote/create_rds/templates/Deploy_RDS_Environment.template.ps1 @@ -29,6 +29,15 @@ foreach ($server in $(Get-RDServer -ErrorAction SilentlyContinue)) { Remove-RDServer -ConnectionBroker "" -Server $server.Server -Role $role -Force -ErrorAction SilentlyContinue } } +foreach ($policyName in $(Get-Item "RDS:\GatewayServer\RAP" -ErrorAction SilentlyContinue | Get-ChildItem | ForEach-Object { $_.Name })) { + Write-Output "... removing existing RAP policy '$policyName'" + Remove-Item "RDS:\GatewayServer\RAP\${policyName}" -Recurse -ErrorAction SilentlyContinue +} +$null = Set-Item "RDS:\GatewayServer\CentralCAPEnabled" -Value 0 -ErrorAction SilentlyContinue +foreach ($policyName in $(Get-Item "RDS:\GatewayServer\CAP" -ErrorAction SilentlyContinue | Get-ChildItem | ForEach-Object { $_.Name })) { + Write-Output "... 
removing existing CAP policy '$policyName'" + Remove-Item "RDS:\GatewayServer\CAP\${policyName}" -Recurse -ErrorAction SilentlyContinue +} # Create RDS Environment @@ -87,7 +96,7 @@ foreach($rdsConfiguration in @(("Applications", "", "" -Alias "chrome (1)" -DisplayName "Code Review" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://.151" -CollectionName "Review" -ErrorAction Stop + $null = New-RDRemoteApp -ConnectionBroker "" -Alias "chrome (1)" -DisplayName "Code Review" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://" -CollectionName "Review" -ErrorAction Stop $null = New-RDRemoteApp -ConnectionBroker "" -Alias "mstsc (1)" -DisplayName "Desktop (DSVM Main)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "-v " -CollectionName "Applications" -ErrorAction Stop $null = New-RDRemoteApp -ConnectionBroker "" -Alias "mstsc (2)" -DisplayName "Desktop (DSVM Other)" -FilePath "C:\Windows\system32\mstsc.exe" -ShowInWebAccess 1 -CollectionName "Applications" -ErrorAction Stop $null = New-RDRemoteApp -ConnectionBroker "" -Alias "chrome (2)" -DisplayName "GitLab" -FilePath "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" -ShowInWebAccess 1 -CommandLineSetting Require -RequiredCommandLine "http://" -CollectionName "Applications" -ErrorAction Stop diff --git a/deployment/secure_research_environment/remote/create_rds/templates/ServerList.template.xml b/deployment/secure_research_environment/remote/create_rds/templates/ServerList.template.xml index b866aed06a..4d505a2eb8 100755 --- a/deployment/secure_research_environment/remote/create_rds/templates/ServerList.template.xml +++ b/deployment/secure_research_environment/remote/create_rds/templates/ServerList.template.xml @@ -1,5 +1,5 @@ - + diff --git 
a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 index 158731cb51..825f20b0cf 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 @@ -34,19 +34,20 @@ $null = Deploy-Subnet -Name $config.sre.network.vnet.subnets.rds.name -VirtualNe # Remove existing peerings # ------------------------ -$shmPeeringName = "PEER_$($config.sre.network.vnet.Name)" -$srePeeringName = "PEER_$($config.shm.network.vnet.Name)" +$shmPeeringName = "PEER_$($config.sre.network.vnet.name)" +$srePeeringName = "PEER_$($config.shm.network.vnet.name)" try { # From SHM VNet - $_ = Set-AzContext -SubscriptionId $config.shm.subscriptionName -ErrorAction Stop + $null = Set-AzContext -SubscriptionId $config.shm.subscriptionName -ErrorAction Stop + $shmVnet = Get-AzVirtualNetwork -Name $config.shm.network.vnet.name -ResourceGroupName $config.shm.network.vnet.rg -ErrorAction Stop if (Get-AzVirtualNetworkPeering -VirtualNetworkName $config.shm.network.vnet.name -ResourceGroupName $config.shm.network.vnet.rg -ErrorAction Stop) { - Add-LogMessage -Level Info "[ ] Removing existing peering '$shmPeeringName' from '$($config.sre.network.vnet.name)' to '$($config.shm.network.vnet.name)'..." + Add-LogMessage -Level Info "[ ] Removing existing peering from '$($config.sre.network.vnet.name)' to '$($config.shm.network.vnet.name)'..." 
Remove-AzVirtualNetworkPeering -Name $shmPeeringName -VirtualNetworkName $config.shm.network.vnet.name -ResourceGroupName $config.shm.network.vnet.rg -Force -ErrorAction Stop } # From SRE VNet - $_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName -ErrorAction Stop + $null = Set-AzContext -SubscriptionId $config.sre.subscriptionName -ErrorAction Stop if (Get-AzVirtualNetworkPeering -VirtualNetworkName $config.sre.network.vnet.name -ResourceGroupName $config.sre.network.vnet.rg -ErrorAction Stop) { - Add-LogMessage -Level Info "[ ] Removing existing peering '$srePeeringName' from '$($config.shm.network.vnet.name)' to '$($config.sre.network.vnet.name)'..." + Add-LogMessage -Level Info "[ ] Removing existing peering from '$($config.shm.network.vnet.name)' to '$($config.sre.network.vnet.name)'..." Remove-AzVirtualNetworkPeering -Name $srePeeringName -VirtualNetworkName $config.sre.network.vnet.name -ResourceGroupName $config.sre.network.vnet.rg -Force -ErrorAction Stop } # Success log message @@ -59,14 +60,14 @@ try { # Add new peerings between SHM and SRE VNets # ------------------------------------------ try { - $_ = Set-AzContext -SubscriptionId $config.shm.subscriptionName -ErrorAction Stop + $null = Set-AzContext -SubscriptionId $config.shm.subscriptionName -ErrorAction Stop Add-LogMessage -Level Info "[ ] Adding peering '$shmPeeringName' from '$($config.sre.network.vnet.name)' to '$($config.shm.network.vnet.name)'..." 
- $_ = Add-AzVirtualNetworkPeering -Name $shmPeeringName -VirtualNetwork $shmVnet -RemoteVirtualNetworkId $sreVnet.Id -AllowGatewayTransit -ErrorAction Stop + $null = Add-AzVirtualNetworkPeering -Name $shmPeeringName -VirtualNetwork $shmVnet -RemoteVirtualNetworkId $sreVnet.Id -AllowGatewayTransit -ErrorAction Stop # Add peering to SRE VNet # ----------------------- - $_ = Set-AzContext -SubscriptionId $config.sre.subscriptionName -ErrorAction Stop + $null = Set-AzContext -SubscriptionId $config.sre.subscriptionName -ErrorAction Stop Add-LogMessage -Level Info "[ ] Adding peering '$srePeeringName' from '$($config.shm.network.vnet.name)' to '$($config.sre.network.vnet.name)'..." - $_ = Add-AzVirtualNetworkPeering -Name $srePeeringName -VirtualNetwork $sreVnet -RemoteVirtualNetworkId $shmVnet.Id -UseRemoteGateways -ErrorAction Stop + $null = Add-AzVirtualNetworkPeering -Name $srePeeringName -VirtualNetwork $sreVnet -RemoteVirtualNetworkId $shmVnet.Id -UseRemoteGateways -ErrorAction Stop # Success log message Add-LogMessage -Level Success "Peering '$($config.shm.network.vnet.name)' and '$($config.sre.network.vnet.name)' succeeded" } catch { @@ -88,8 +89,10 @@ $vmNamePairs = @(("RDS Gateway", $config.sre.rds.gateway.vmName), # Set variables used in template expansion, retrieving from the key vault where appropriate # ----------------------------------------------------------------------------------------- Add-LogMessage -Level Info "Creating/retrieving secrets from key vault '$($config.sre.keyVault.name)'..." 
+$domainAdminUsername = Resolve-KeyVaultSecret -VaultName $config.shm.keyVault.name -SecretName $config.shm.keyVault.secretNames.domainAdminUsername +$domainJoinGatewayPassword = Resolve-KeyVaultSecret -VaultName $config.shm.keyVault.name -SecretName $config.shm.users.computerManagers.rdsGatewayServers.passwordSecretName -DefaultLength 20 +$domainJoinSessionHostPassword = Resolve-KeyVaultSecret -VaultName $config.shm.keyVault.name -SecretName $config.shm.users.computerManagers.rdsSessionServers.passwordSecretName -DefaultLength 20 $dsvmInitialIpAddress = Get-NextAvailableIpInRange -IpRangeCidr $config.sre.network.vnet.subnets.data.cidr -Offset 160 -$npsSecret = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.npsSecret -DefaultLength 12 $rdsGatewayAdminPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.rds.gateway.adminPasswordSecretName -DefaultLength 20 $rdsGatewayVmFqdn = $config.sre.rds.gateway.fqdn $rdsGatewayVmName = $config.sre.rds.gateway.vmName @@ -102,8 +105,6 @@ $rdsSh3AdminPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.na $rdsSh3VmFqdn = $config.sre.rds.sessionHost3.fqdn $researchUserSgName = $config.sre.domain.securityGroups.researchUsers.name $reviewUserSgName = $config.sre.domain.securityGroups.reviewUsers.name -$domainJoinGatewayPassword = Resolve-KeyVaultSecret -VaultName $config.shm.keyVault.name -SecretName $config.shm.users.computerManagers.rdsGatewayServers.passwordSecretName -DefaultLength 20 -$domainJoinSessionHostPassword = Resolve-KeyVaultSecret -VaultName $config.shm.keyVault.name -SecretName $config.shm.users.computerManagers.rdsSessionServers.passwordSecretName -DefaultLength 20 $shmNetbiosName = $config.shm.domain.netbiosName $sreAdminUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.adminUsername -DefaultValue "sre$($config.sre.id)admin".ToLower() 
$sreDomain = $config.sre.domain.fqdn @@ -241,9 +242,11 @@ Add-LogMessage -Level Info "Upload RDS deployment scripts to storage..." # Expand deploy script $deployScriptLocalFilePath = (New-TemporaryFile).FullName $template = Join-Path $PSScriptRoot ".." "remote" "create_rds" "templates" "Deploy_RDS_Environment.template.ps1" | Get-Item | Get-Content -Raw -$template.Replace("", $config.sre.network.subnets.airlock.prefix). - Replace("", $config.sre.network.subnets.data.prefix). - Replace("", $config.sre.network.subnets.data.prefix). +$template.Replace("", $domainAdminUsername). + Replace("", $dsvmInitialIpAddress). + Replace("", $config.sre.webapps.gitlab.ip). + Replace("", $config.sre.webapps.gitlabreview.ip). + Replace("", $config.sre.webapps.hackmd.ip). Replace("", $rdsGatewayVmFqdn). Replace("", $rdsGatewayVmName). Replace("", $rdsSh1VmFqdn). @@ -256,9 +259,6 @@ $template.Replace("", $config.sre.network.subnets.airlock Replace("", $researchUserSgName). Replace("", $reviewUserSgName). Replace("", $shmNetbiosName). - Replace("", $dsvmInitialIpAddress). - Replace("", $config.sre.webapps.gitlab.ip). - Replace("", $config.sre.webapps.hackmd.ip). Replace("", $sreDomain) | Out-File $deployScriptLocalFilePath @@ -266,11 +266,9 @@ $template.Replace("", $config.sre.network.subnets.airlock $serverListLocalFilePath = (New-TemporaryFile).FullName $template = Join-Path $PSScriptRoot ".." "remote" "create_rds" "templates" "ServerList.template.xml" | Get-Item | Get-Content -Raw $template.Replace("", $rdsGatewayVmFqdn). - Replace("", $rdsGatewayVmName). Replace("", $rdsSh1VmFqdn). Replace("", $rdsSh2VmFqdn). - Replace("", $rdsSh3VmFqdn). 
- Replace("", $sreDomain) | Out-File $serverListLocalFilePath + Replace("", $rdsSh3VmFqdn) | Out-File $serverListLocalFilePath # Copy installers from SHM storage Add-LogMessage -Level Info "[ ] Copying RDS installers to storage account '$($sreStorageAccount.StorageAccountName)'" @@ -364,6 +362,7 @@ foreach ($blob in Get-AzStorageBlob -Container $containerNameSessionHosts -Conte foreach ($blob in Get-AzStorageBlob -Container $containerNameGateway -Context $sreStorageAccount.Context) { $blobfiles[$config.sre.rds.gateway.vmName] += @{$containerNameGateway = $blob.Name} } +$null = Set-AzContext -SubscriptionId $config.sre.subscriptionName # Copy software and/or scripts to RDS VMs $scriptPath = Join-Path $PSScriptRoot ".." "remote" "create_rds" "scripts" "Import_And_Install_Blobs.ps1" diff --git a/deployment/secure_research_environment/setup/Update_SRE_RDS_SSL_Certificate.ps1 b/deployment/secure_research_environment/setup/Update_SRE_RDS_SSL_Certificate.ps1 index 2887692ec5..72f724f488 100644 --- a/deployment/secure_research_environment/setup/Update_SRE_RDS_SSL_Certificate.ps1 +++ b/deployment/secure_research_environment/setup/Update_SRE_RDS_SSL_Certificate.ps1 @@ -29,6 +29,7 @@ Import-Module $PSScriptRoot/../../common/Logging.psm1 -Force # ------------------------------- $config = Get-SreConfig $configId $originalContext = Get-AzContext +$null = Set-AzContext -SubscriptionId $config.sre.subscriptionName # Set common variables @@ -182,7 +183,7 @@ if ($doInstall) { # Add signed KeyVault certificate to the gateway VM # ------------------------------------------------- Add-LogMessage -Level Info "Adding SSL certificate to RDS Gateway VM" - $vaultId = (Get-AzKeyVault -ResourceGroupName $config.sre.keyVault.rg -VaultName $keyVaultName).ResourceId + $vaultId = (Get-AzKeyVault -VaultName $keyVaultName -ResourceGroupName $config.sre.keyVault.rg).ResourceId $secretURL = (Get-AzKeyVaultSecret -VaultName $keyVaultName -Name $certificateName).Id $gatewayVm = Get-AzVM 
-ResourceGroupName $config.sre.rds.rg -Name $config.sre.rds.gateway.vmName | Remove-AzVMSecret $gatewayVm = Add-AzVMSecret -VM $gatewayVm -SourceVaultId $vaultId -CertificateStore "My" -CertificateUrl $secretURL diff --git a/environment_configs/full/sre_testasandbox_full_config.json b/environment_configs/full/sre_testasandbox_full_config.json index d5a317095e..838177383b 100644 --- a/environment_configs/full/sre_testasandbox_full_config.json +++ b/environment_configs/full/sre_testasandbox_full_config.json @@ -416,7 +416,7 @@ "description": "SG SANDBOX Research Users", "name": "SG SANDBOX Research Users" }, - "reviewUsersGroup": { + "reviewUsers": { "description": "SG SANDBOX Review Users", "name": "SG SANDBOX Review Users" }, From 59b5d08787df1c833659a4f360078bfe49161a64 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Mon, 6 Jul 2020 17:51:24 +0100 Subject: [PATCH 139/155] Removed wait for cloud-init as this is included in the deployment script now --- .../setup/Setup_SRE_WebApp_Servers.ps1 | 3 --- 1 file changed, 3 deletions(-) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 6f39c4cf2d..0b831021a5 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -115,7 +115,6 @@ try { Add-LogMessage -Level Warning "Temporarily allowing outbound internet access from $($config.sre.webapps.gitlab.ip)..." 
# Note that this has no effect at present Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps -Name "TmpAllowOutboundInternetGitlab" -SourceAddressPrefix $config.sre.webapps.gitlab.ip -Access Allow -Description "Allow outbound internet" -DestinationAddressPrefix Internet -DestinationPortRange * -Direction Outbound -Priority 100 -Protocol * -SourcePortRange * $null = Deploy-UbuntuVirtualMachine @gitlabDeploymentParams @commonDeploymentParams - Wait-ForAzVMCloudInit -Name $config.sre.webapps.gitlab.vmName -ResourceGroupName $config.sre.webapps.rg Add-VmToNSG -VMName $config.sre.webapps.gitlab.vmName -NSGName $config.sre.webapps.nsg Enable-AzVM -Name $config.sre.webapps.gitlab.vmName -ResourceGroupName $config.sre.webapps.rg } finally { @@ -153,7 +152,6 @@ try { Add-LogMessage -Level Warning "Temporarily allowing outbound internet access from $($config.sre.webapps.hackmd.ip)..." # Note that this has no effect at present Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps -Name "TmpAllowOutboundInternetHackMD" -SourceAddressPrefix $config.sre.webapps.hackmd.ip -Access Allow -Description "Allow outbound internet" -DestinationAddressPrefix Internet -DestinationPortRange * -Direction Outbound -Priority 100 -Protocol * -SourcePortRange * $null = Deploy-UbuntuVirtualMachine @hackmdDeploymentParams @commonDeploymentParams - Wait-ForAzVMCloudInit -Name $config.sre.webapps.hackmd.vmName -ResourceGroupName $config.sre.webapps.rg Add-VmToNSG -VMName $config.sre.webapps.hackmd.vmName -NSGName $config.sre.webapps.nsg Enable-AzVM -Name $config.sre.webapps.hackmd.vmName -ResourceGroupName $config.sre.webapps.rg } finally { @@ -220,7 +218,6 @@ try { Add-LogMessage -Level Warning "Temporarily allowing outbound internet access from $($config.sre.webapps.gitlabreview.ip)..." 
Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock -Name "TmpAllowOutboundInternetGitlabReview" -SourceAddressPrefix $config.sre.webapps.gitlabreview.ip -Access Allow -Description "Allow outbound internet" -DestinationAddressPrefix Internet -DestinationPortRange * -Direction Outbound -Priority 100 -Protocol * -SourcePortRange * $null = Deploy-UbuntuVirtualMachine @gitlabReviewDeploymentParams @commonDeploymentParams - Wait-ForAzVMCloudInit -Name $config.sre.webapps.gitlabreview.vmName -ResourceGroupName $config.sre.webapps.rg Enable-AzVM -Name $config.sre.webapps.gitlabreview.vmName -ResourceGroupName $config.sre.webapps.rg } finally { $null = Remove-AzNetworkSecurityRuleConfig -Name "TmpAllowOutboundInternetGitlabReview" -NetworkSecurityGroup $nsgAirlock From 6d36d721544650359c796497f8c6a77553e9bb66 Mon Sep 17 00:00:00 2001 From: James Robinson Date: Mon, 6 Jul 2020 20:31:35 +0100 Subject: [PATCH 140/155] Updates from webapps redeploy --- deployment/common/Configuration.psm1 | 23 ++++++- .../setup/Setup_SRE_VNET_RDS.ps1 | 2 +- .../setup/Setup_SRE_WebApp_Servers.ps1 | 61 ++++++++++--------- .../full/sre_testasandbox_full_config.json | 22 ++++++- 4 files changed, 71 insertions(+), 37 deletions(-) diff --git a/deployment/common/Configuration.psm1 b/deployment/common/Configuration.psm1 index eec3a6fa73..5c0ec1a9be 100644 --- a/deployment/common/Configuration.psm1 +++ b/deployment/common/Configuration.psm1 @@ -303,10 +303,10 @@ function Add-SreConfig { nsg = "NSG_SRE_$($config.sre.id)_WEBAPPS".ToUpper() gitlab = [ordered]@{ adminPasswordSecretName = "$($config.sre.shortName)-vm-admin-password-gitlab" + apiTokenSecretName = "$($config.sre.shortName)-other-gitlab-api-token" vmName = "GITLAB-SRE-$($config.sre.id)".ToUpper() vmSize = "Standard_D2s_v3" ip = Get-NextAvailableIpInRange -IpRangeCidr $config.sre.network.vnet.subnets.data.cidr -Offset 5 - rootPasswordSecretName = "$($config.sre.shortName)-other-gitlab-root-password" disks = [ordered]@{ data = 
[ordered]@{ sizeGb = "750" @@ -317,13 +317,22 @@ function Add-SreConfig { type = "Standard_LRS" } } + userIngress = [ordered]@{ + usernameSecretName = "$($config.sre.shortName)-other-gitlab-username-ingress-" + passwordSecretName = "$($config.sre.shortName)-other-gitlab-password-ingress" + } + userRoot = [ordered]@{ + passwordSecretName = "$($config.sre.shortName)-other-gitlab-password-root" + } + } - gitlabreview = [ordered]@{ + gitlabReview = [ordered]@{ adminPasswordSecretName = "$($config.sre.shortName)-vm-admin-password-gitlab-review" + apiTokenSecretName = "$($config.sre.shortName)-other-gitlab-review-api-token" vmName = "GITLAB-REVIEW-$($config.sre.id)".ToUpper() vmSize = "Standard_D2s_v3" ip = Get-NextAvailableIpInRange -IpRangeCidr $config.sre.network.vnet.subnets.airlock.cidr -Offset 4 - rootPasswordSecretName = "$($config.sre.shortName)-other-gitlab-review-root-password" + rootPasswordSecretName = "$($config.sre.shortName)-other-gitlab-review-password-root" disks = [ordered]@{ data = [ordered]@{ sizeGb = "750" @@ -334,9 +343,17 @@ function Add-SreConfig { type = "Standard_LRS" } } + userIngress = [ordered]@{ + usernameSecretName = "$($config.sre.shortName)-other-gitlab-review-username-ingress" + passwordSecretName = "$($config.sre.shortName)-other-gitlab-review-password-ingress" + } + userRoot = [ordered]@{ + passwordSecretName = "$($config.sre.shortName)-other-gitlab-review-password-root" + } } hackmd = [ordered]@{ adminPasswordSecretName = "$($config.sre.shortName)-vm-admin-password-hackmd" + postgresPasswordSecretName = "$($config.sre.shortName)-vm-other-hackmd-password-postgresdb" vmName = "HACKMD-SRE-$($config.sre.id)".ToUpper() vmSize = "Standard_D2s_v3" ip = Get-NextAvailableIpInRange -IpRangeCidr $config.sre.network.vnet.subnets.data.cidr -Offset 6 diff --git a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 index 825f20b0cf..88a690ea1d 100644 --- 
a/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_VNET_RDS.ps1 @@ -245,7 +245,7 @@ $template = Join-Path $PSScriptRoot ".." "remote" "create_rds" "templates" "Depl $template.Replace("", $domainAdminUsername). Replace("", $dsvmInitialIpAddress). Replace("", $config.sre.webapps.gitlab.ip). - Replace("", $config.sre.webapps.gitlabreview.ip). + Replace("", $config.sre.webapps.gitlabReview.ip). Replace("", $config.sre.webapps.hackmd.ip). Replace("", $rdsGatewayVmFqdn). Replace("", $rdsGatewayVmName). diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 0b831021a5..974549daba 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -21,17 +21,17 @@ $null = Set-AzContext -SubscriptionId $config.sre.subscriptionName # ------------------------------------ Add-LogMessage -Level Info "Creating/retrieving secrets from key vault '$($config.sre.keyVault.name)'..." 
$gitlabAdminPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.webapps.gitlab.adminPasswordSecretName -DefaultLength 20 -$gitlabAPIToken = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabAPIToken -$gitlabPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabPassword -$gitlabReviewAPIToken = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabReviewAPIToken +$gitlabAPIToken = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.webapps.gitlab.apiTokenSecretName -DefaultLength 20 +$gitlabUserIngressPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.webapps.gitlab.userIngress.passwordSecretName -DefaultLength 20 +$gitlabUserIngressUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.webapps.gitlab.userIngress.usernameSecretName -DefaultValue "ingress" +$gitlabUserRootPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.webapps.gitlab.userRoot.passwordSecretName -DefaultLength 20 $gitlabReviewAdminPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.webapps.gitlabReview.adminPasswordSecretName -DefaultLength 20 -$gitlabReviewPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabReviewPassword -$gitlabReviewUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabReviewUsername -DefaultValue "ingress" -$gitlabRootPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.webapps.gitlab.rootPasswordSecretName -DefaultLength 20 -$gitlabReviewRootPassword = Resolve-KeyVaultSecret 
-VaultName $config.sre.keyVault.name -SecretName $config.sre.webapps.gitlabReview.rootPasswordSecretName -DefaultLength 20 -$gitlabUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.gitlabUsername -DefaultValue "ingress" +$gitlabReviewAPIToken = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.webapps.gitlabReview.apiTokenSecretName -DefaultLength 20 +$gitlabReviewUserIngressPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.webapps.gitlabReview.userIngress.passwordSecretName -DefaultLength 20 +$gitlabReviewUserIngressUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.webapps.gitlabReview.userIngress.usernameSecretName -DefaultValue "ingress" +$gitlabReviewUserRootPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.webapps.gitlabReview.userRoot.passwordSecretName -DefaultLength 20 $hackmdAdminPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.webapps.hackmd.adminPasswordSecretName -DefaultLength 20 -$hackmdPostgresPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.hackmdUserPassword +$hackmdPostgresPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.webapps.hackmd.postgresPasswordSecretName -DefaultLength 20 $ldapSearchUserDn = "CN=$($config.sre.users.serviceAccounts.ldapSearch.name),$($config.shm.domain.ous.serviceAccounts.path)" $ldapSearchUserPassword = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.users.serviceAccounts.ldapSearch.passwordSecretName -DefaultLength 20 $vmAdminUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.keyVault.secretNames.adminUsername -DefaultValue 
"sre$($config.sre.id)admin".ToLower() @@ -42,21 +42,22 @@ $nsgAirlock = Deploy-NetworkSecurityGroup -Name $config.sre.network.nsg.airlock. $nsgWebapps = Deploy-NetworkSecurityGroup -Name $config.sre.webapps.nsg -ResourceGroupName $config.sre.network.vnet.rg -Location $config.sre.location -RemoveAllRules $params = @{ ipAddressGitLab = $config.sre.webapps.gitlab.ip - ipAddressGitLabReview = $config.sre.webapps.gitlabreview.ip + ipAddressGitLabReview = $config.sre.webapps.gitlabReview.ip ipAddressSessionHostApps = $config.sre.rds.sessionHost1.ip ipAddressSessionHostReview = $config.sre.rds.sessionHost3.ip nsgAirlockName = $config.sre.network.nsg.airlock.name nsgWebappsName = $config.sre.webapps.nsg - subnetComputeCidr = $config.sre.network.subnets.data.cidr + subnetComputeCidr = $config.sre.network.vnet.subnets.data.cidr subnetVpnCidr = "172.16.201.0/24" # TODO fix this when it is no longer hard-coded } Deploy-ArmTemplate -TemplatePath (Join-Path $PSScriptRoot ".." "arm_templates" "sre-nsg-rules-template.json") -Params $params -ResourceGroupName $config.sre.network.vnet.rg + # Check that VNET and subnets exist # --------------------------------- $vnet = Deploy-VirtualNetwork -Name $config.sre.network.vnet.name -ResourceGroupName $config.sre.network.vnet.rg -AddressPrefix $config.sre.network.vnet.cidr -Location $config.sre.location -$subnetAirlock = Deploy-Subnet -Name $config.sre.network.subnets.airlock.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.airlock.cidr -$subnetWebapps = Deploy-Subnet -Name $config.sre.network.subnets.data.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.subnets.data.cidr # NB. 
this is currently the SharedData subnet but will change soon +$subnetAirlock = Deploy-Subnet -Name $config.sre.network.vnet.subnets.airlock.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.vnet.subnets.airlock.cidr +$subnetWebapps = Deploy-Subnet -Name $config.sre.network.vnet.subnets.data.name -VirtualNetwork $vnet -AddressPrefix $config.sre.network.vnet.subnets.data.cidr # NB. this is currently the SharedData subnet but will change soon # Attach NSGs to subnets @@ -94,10 +95,10 @@ $gitlabCloudInit = (Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitla Replace('', $config.sre.webapps.gitlab.ip). Replace('', $config.sre.webapps.gitlab.hostname). Replace('', "$($config.sre.webapps.gitlab.hostname).$($config.sre.domain.fqdn)"). - Replace('', $gitlabRootPassword). + Replace('', $gitlabUserRootPassword). Replace('', $config.shm.domain.fqdn). - Replace('', $gitlabUsername). - Replace('', $gitlabPassword). + Replace('', $gitlabUserIngressUsername). + Replace('', $gitlabUserIngressPassword). Replace('', $gitlabAPIToken) # Set GitLab deployment parameters $gitlabDataDisk = Deploy-ManagedDisk -Name "$($config.sre.webapps.gitlab.vmName)-DATA-DISK" -SizeGB 512 -Type "Standard_LRS" -ResourceGroupName $config.sre.webapps.rg -Location $config.sre.location @@ -115,7 +116,7 @@ try { Add-LogMessage -Level Warning "Temporarily allowing outbound internet access from $($config.sre.webapps.gitlab.ip)..." 
# Note that this has no effect at present Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps -Name "TmpAllowOutboundInternetGitlab" -SourceAddressPrefix $config.sre.webapps.gitlab.ip -Access Allow -Description "Allow outbound internet" -DestinationAddressPrefix Internet -DestinationPortRange * -Direction Outbound -Priority 100 -Protocol * -SourcePortRange * $null = Deploy-UbuntuVirtualMachine @gitlabDeploymentParams @commonDeploymentParams - Add-VmToNSG -VMName $config.sre.webapps.gitlab.vmName -NSGName $config.sre.webapps.nsg + Add-VmToNSG -VMName $config.sre.webapps.gitlab.vmName -NSGName $config.sre.webapps.nsg -VmResourceGroupName $config.sre.webapps.rg -NsgResourceGroupName $config.sre.network.vnet.rg Enable-AzVM -Name $config.sre.webapps.gitlab.vmName -ResourceGroupName $config.sre.webapps.rg } finally { $null = Remove-AzNetworkSecurityRuleConfig -Name "TmpAllowOutboundInternetGitlab" -NetworkSecurityGroup $nsgWebapps @@ -152,7 +153,7 @@ try { Add-LogMessage -Level Warning "Temporarily allowing outbound internet access from $($config.sre.webapps.hackmd.ip)..." 
# Note that this has no effect at present Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps -Name "TmpAllowOutboundInternetHackMD" -SourceAddressPrefix $config.sre.webapps.hackmd.ip -Access Allow -Description "Allow outbound internet" -DestinationAddressPrefix Internet -DestinationPortRange * -Direction Outbound -Priority 100 -Protocol * -SourcePortRange * $null = Deploy-UbuntuVirtualMachine @hackmdDeploymentParams @commonDeploymentParams - Add-VmToNSG -VMName $config.sre.webapps.hackmd.vmName -NSGName $config.sre.webapps.nsg + Add-VmToNSG -VMName $config.sre.webapps.hackmd.vmName -NSGName $config.sre.webapps.nsg -VmResourceGroupName $config.sre.webapps.rg -NsgResourceGroupName $config.sre.network.vnet.rg Enable-AzVM -Name $config.sre.webapps.hackmd.vmName -ResourceGroupName $config.sre.webapps.rg } finally { $null = Remove-AzNetworkSecurityRuleConfig -Name "TmpAllowOutboundInternetHackMD" -NetworkSecurityGroup $nsgWebapps @@ -182,13 +183,13 @@ $gitlabReviewCloudInit = (Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init Replace('', $ldapSearchUserPassword). Replace('', $config.shm.domain.userOuPath). Replace('', "(&(objectClass=user)(memberOf=CN=$($config.sre.domain.securityGroups.reviewUsers.name),$($config.shm.domain.securityOuPath)))"). - Replace('', $config.sre.webapps.gitlabreview.ip). - Replace('', $config.sre.webapps.gitlabreview.hostname). - Replace('', "$($config.sre.webapps.gitlabreview.hostname).$($config.sre.domain.fqdn)"). - Replace('', $gitlabReviewRootPassword). + Replace('', $config.sre.webapps.gitlabReview.ip). + Replace('', $config.sre.webapps.gitlabReview.hostname). + Replace('', "$($config.sre.webapps.gitlabReview.hostname).$($config.sre.domain.fqdn)"). + Replace('', $gitlabReviewUserRootPassword). Replace('', $config.shm.domain.fqdn). - Replace('', $gitlabReviewUsername). - Replace('', $gitlabReviewPassword). + Replace('', $gitlabReviewUserIngressUsername). + Replace('', $gitlabReviewUserIngressPassword). 
Replace('', $gitlabReviewAPIToken) # Insert SSH keys and scripts into cloud init template, maintaining indentation $indent = " " @@ -208,17 +209,17 @@ foreach ($scriptName in @("zipfile_to_gitlab_project.py", $gitlabReviewDeploymentParams = @{ AdminPassword = $gitlabReviewAdminPassword CloudInitYaml = $gitlabReviewCloudInit - Name = $config.sre.webapps.gitlabreview.vmName - PrivateIpAddress = $config.sre.webapps.gitlabreview.ip - Size = $config.sre.webapps.gitlabreview.vmSize + Name = $config.sre.webapps.gitlabReview.vmName + PrivateIpAddress = $config.sre.webapps.gitlabReview.ip + Size = $config.sre.webapps.gitlabReview.vmSize Subnet = $subnetAirlock } # Deploy GitLab review VM try { - Add-LogMessage -Level Warning "Temporarily allowing outbound internet access from $($config.sre.webapps.gitlabreview.ip)..." - Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock -Name "TmpAllowOutboundInternetGitlabReview" -SourceAddressPrefix $config.sre.webapps.gitlabreview.ip -Access Allow -Description "Allow outbound internet" -DestinationAddressPrefix Internet -DestinationPortRange * -Direction Outbound -Priority 100 -Protocol * -SourcePortRange * + Add-LogMessage -Level Warning "Temporarily allowing outbound internet access from $($config.sre.webapps.gitlabReview.ip)..." 
+ Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock -Name "TmpAllowOutboundInternetGitlabReview" -SourceAddressPrefix $config.sre.webapps.gitlabReview.ip -Access Allow -Description "Allow outbound internet" -DestinationAddressPrefix Internet -DestinationPortRange * -Direction Outbound -Priority 100 -Protocol * -SourcePortRange * $null = Deploy-UbuntuVirtualMachine @gitlabReviewDeploymentParams @commonDeploymentParams - Enable-AzVM -Name $config.sre.webapps.gitlabreview.vmName -ResourceGroupName $config.sre.webapps.rg + Enable-AzVM -Name $config.sre.webapps.gitlabReview.vmName -ResourceGroupName $config.sre.webapps.rg } finally { $null = Remove-AzNetworkSecurityRuleConfig -Name "TmpAllowOutboundInternetGitlabReview" -NetworkSecurityGroup $nsgAirlock } diff --git a/environment_configs/full/sre_testasandbox_full_config.json b/environment_configs/full/sre_testasandbox_full_config.json index 838177383b..6db229faef 100644 --- a/environment_configs/full/sre_testasandbox_full_config.json +++ b/environment_configs/full/sre_testasandbox_full_config.json @@ -624,6 +624,7 @@ "webapps": { "gitlab": { "adminPasswordSecretName": "sre-sandbox-webappvm-admin-password", + "apiTokenSecretName": "sre-sandbox-other-gitlab-api-token", "disks": { "data": { "sizeGb": "750", @@ -637,12 +638,19 @@ "fqdn": "GITLAB-SRE-SANDBOX.testa.dsgroupdev.co.uk", "hostname": "GITLAB-SRE-SANDBOX", "ip": "10.150.2.151", - "rootPasswordSecretName": "sre-sandbox-other-gitlab-root-password", + "userIngress": { + "passwordSecretName": "sre-sandbox-other-gitlab-password-ingress", + "usernameSecretName": "sre-sandbox-other-gitlab-username-ingress-" + }, + "userRoot": { + "passwordSecretName": "sre-sandbox-other-gitlab-password-root" + }, "vmName": "GITLAB-SRE-SANDBOX", "vmSize": "Standard_D2s_v3" }, - "gitlabreview": { + "gitlabReview": { "adminPasswordSecretName": "sre-sandbox-vm-admin-password-gitlab-review", + "apiTokenSecretName": "sre-sandbox-other-gitlab-review-api-token", "disks": { "data": { 
"sizeGb": "750", @@ -656,7 +664,14 @@ "fqdn": "GITLAB-REVIEW-SANDBOX.testa.dsgroupdev.co.uk", "hostname": "GITLAB-REVIEW-SANDBOX", "ip": "10.150.4.4", - "rootPasswordSecretName": "sre-sandbox-other-gitlab-review-root-password", + "rootPasswordSecretName": "sre-sandbox-other-gitlab-review-password-root", + "userIngress": { + "passwordSecretName": "sre-sandbox-other-gitlab-review-password-ingress", + "usernameSecretName": "sre-sandbox-other-gitlab-review-username-ingress" + }, + "userRoot": { + "passwordSecretName": "sre-sandbox-other-gitlab-review-password-root" + }, "vmName": "GITLAB-REVIEW-SANDBOX", "vmSize": "Standard_D2s_v3" }, @@ -671,6 +686,7 @@ "fqdn": "HACKMD-SRE-SANDBOX.testa.dsgroupdev.co.uk", "hostname": "HACKMD-SRE-SANDBOX", "ip": "10.150.2.152", + "postgresPasswordSecretName": "sre-sandbox-vm-other-hackmd-password-postgresdb", "vmName": "HACKMD-SRE-SANDBOX", "vmSize": "Standard_D2s_v3" }, From a80cb82308efacaa9aa5ec2d584f5bcb2b1061a0 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Fri, 3 Jul 2020 15:35:23 +0100 Subject: [PATCH 141/155] Pass subprocess.run args as a list --- .../cloud_init/scripts/zipfile_to_gitlab_project.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index d74691aff6..889ad68b3a 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -35,7 +35,7 @@ def unzip_zipfiles(zipfile_dir): zipfile_path = os.path.join(zipfile_subdir, "repo.zip") unpacked_location = os.path.join(zipfile_subdir, "repo") # ensure "repo" does not already exist (from a previous failed attempt) - subprocess.run("rm", "-rf", unpacked_location, check=True) + subprocess.run(["rm", "-rf", unpacked_location], check=True) try: with 
ZipFile(zipfile_path, "r") as zip_obj: zip_obj.extractall(path=zipfile_subdir) @@ -572,7 +572,7 @@ def unzipped_snapshot_to_merge_request(gitlab_config, snapshot_details, namespac ) # cleanup this zipfile and its extracted contents - subprocess.run("rm", "-rf", snapshot_path, check=True) + subprocess.run(["rm", "-rf", snapshot_path], check=True) def main(): From 48d2be3e3445ffa28afaab596a552c4f3ddaac13 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Tue, 7 Jul 2020 17:26:44 +0100 Subject: [PATCH 142/155] Use correct LDAP OU for research users on webapp VMs --- .../setup/Setup_SRE_WebApp_Servers.ps1 | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 974549daba..62ffe8501b 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -89,7 +89,7 @@ $commonDeploymentParams = @{ $gitlabCloudInit = (Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab.template.yaml" | Get-Item | Get-Content -Raw). Replace('', $ldapSearchUserDn). Replace('', $ldapSearchUserPassword). - Replace('', $config.shm.domain.userOuPath). + Replace('', $config.shm.domain.ous.researchUsers.path). Replace('', "(&(objectClass=user)(memberOf=CN=$($config.sre.domain.securityGroups.researchUsers.name),$($config.shm.domain.securityOuPath)))"). Replace('', "$($config.shm.dc.hostname).$($config.shm.domain.fqdn)"). Replace('', $config.sre.webapps.gitlab.ip). @@ -130,7 +130,7 @@ $hackmdCloudInit = (Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-hackm Replace('', $ldapSearchUserDn). Replace('', $ldapSearchUserPassword). Replace('', "(&(objectClass=user)(memberOf=CN=$($config.sre.domain.securityGroups.researchUsers.name),$($config.shm.domain.securityOuPath))(userPrincipalName={{username}}))"). 
- Replace('', $config.shm.domain.userOuPath). + Replace('', $config.shm.domain.ous.researchUsers.path). Replace('', $config.sre.webapps.hackmd.ip). Replace('', $config.sre.webapps.hackmd.hostname). Replace('', "$($config.sre.webapps.hackmd.hostname).$($config.sre.domain.fqdn)"). @@ -181,7 +181,7 @@ $gitlabReviewCloudInit = (Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init Replace('', "$($config.shm.dc.hostname).$($config.shm.domain.fqdn)"). Replace('', $ldapSearchUserDn). Replace('', $ldapSearchUserPassword). - Replace('', $config.shm.domain.userOuPath). + Replace('', $config.shm.domain.ous.researchUsers.path). Replace('', "(&(objectClass=user)(memberOf=CN=$($config.sre.domain.securityGroups.reviewUsers.name),$($config.shm.domain.securityOuPath)))"). Replace('', $config.sre.webapps.gitlabReview.ip). Replace('', $config.sre.webapps.gitlabReview.hostname). From 2f505d2b7059e9cd930a021ebaacc3635319fc9d Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 9 Jul 2020 09:33:32 +0100 Subject: [PATCH 143/155] Make the GitLab service user the owner of the zipfiles --- .../administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 index 226b0ddb2b..c01abb2ff7 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 @@ -60,7 +60,7 @@ Set-Location $snapshotPath git checkout $sourceCommitHash Remove-Item -Path (Join-Path $snapshotPath ".git") -Recurse -Force -## Record some metadata about the repository +# Record some metadata about the repository Set-Location $repoPath $sourceGitURL > sourceGitURL $targetRepoName > targetRepoName @@ -104,14 +104,13 @@ Add-LogMessage -Level Success "Constructed upload URL $remoteUrl" # Download the zipfile onto the remote machine # 
-------------------------------------------- -$sreAdminUsername = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.Name -SecretName $config.sre.keyVault.secretNames.adminUsername -DefaultValue "sre$($config.sre.id)admin".ToLower() # Create remote script (make a subdirectory of /tmp/zipfiles and run CURL to download blob to there) $script = @" #!/bin/bash mkdir -p /tmp/zipfiles/ tmpdir=`$(mktemp -d /tmp/zipfiles/XXXXXXXXXXXXXXXXXXXX) curl -X GET -o `$tmpdir/${zipFileName} "${remoteUrl}" -chown -R ${sreAdminUsername}:${sreAdminUsername} /tmp/zipfiles/ +chown -R gitlabdaemon:gitlabdaemon /tmp/zipfiles/ "@ Add-LogMessage -Level Info "[ ] Running remote script to download zipfile onto $($config.sre.webapps.gitlabreview.vmName)" $result = Invoke-RemoteScript -Shell "UnixShell" -Script $script -VMName $config.sre.webapps.gitlabreview.vmName -ResourceGroupName $config.sre.webapps.rg From 8041233068440779dc311478fdcda6ea79e75085 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 9 Jul 2020 09:36:00 +0100 Subject: [PATCH 144/155] Fix: string interpolation --- .../cloud_init/scripts/gitlab_config.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py index dcfc1c58d0..44da5516bb 100755 --- a/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py +++ b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py @@ -9,8 +9,8 @@ def http_error(msg, response): return requests.HTTPError( - msg + ": Unexpected response: " + response.reason + " (" - + response.status_code + "), content: " + response.text + f"{msg}: Unexpected response: {response.reason} ({response.status_code))" + f", content: {response.text}" ) From a77afc9557ea2e603c3c83ee76b6c164d8c498ce Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 9 Jul 2020 09:40:24 +0100 Subject: [PATCH 145/155] 
Recover from partially-completed run: ensure cloned repo doesn't already exist --- .../cloud_init/scripts/zipfile_to_gitlab_project.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 889ad68b3a..5ee7144633 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -390,6 +390,9 @@ def clone_commit_and_push( commit_hash: str, the commit hash of the snapshot of the upstream project """ + # Ensure the cloned repo does not already exist (from an interrupted attempt) + subprocess.run(["rm", "-rf", "cloned_repo"], cwd=tmp_repo_dir, check=True) + # Clone the repo subprocess.run(["git", "clone", remote_url, "cloned_repo"], cwd=tmp_repo_dir, check=True) working_dir = os.path.join(tmp_repo_dir, "cloned_repo") From b7ca0daf7e9f4d9c3e69cc4b2cb3ecf91da43ba9 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 9 Jul 2020 09:40:38 +0100 Subject: [PATCH 146/155] Fix: argument order --- .../cloud_init/scripts/zipfile_to_gitlab_project.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 5ee7144633..cb43de1fa2 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -523,7 +523,7 @@ def unzipped_snapshot_to_merge_request(gitlab_config, snapshot_details, namespac unzipped_location = os.path.join(snapshot_path, "repo") target_project_info = get_or_create_project( - gitlab_config, "approved", repo_name, namespace_ids, + 
gitlab_config, namespace_ids, "approved", repo_name, ) target_project_id = target_project_info["id"] target_project_url = target_project_info["ssh_url_to_repo"] From 8d477fc9c5b9e1c7096ba19cc87bdc96fcf29e9e Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 9 Jul 2020 09:47:45 +0100 Subject: [PATCH 147/155] Lint --- .../cloud_init/scripts/gitlab_config.py | 2 +- .../cloud_init/scripts/zipfile_to_gitlab_project.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py index 44da5516bb..dadb7afe37 100755 --- a/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py +++ b/deployment/secure_research_environment/cloud_init/scripts/gitlab_config.py @@ -9,7 +9,7 @@ def http_error(msg, response): return requests.HTTPError( - f"{msg}: Unexpected response: {response.reason} ({response.status_code))" + f"{msg}: Unexpected response: {response.reason} ({response.status_code})" f", content: {response.text}" ) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index cb43de1fa2..325236955b 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -385,7 +385,7 @@ def clone_commit_and_push( target_branch_name: str, the name of the branch to push to remote_url: str, the URL for this project on gitlab-review to be added as a remote ("unapproved"). 
- target_project_url: str, the url of the original imported project on + target_project_url: str, the url of the original imported project on gitlab-review ("approved") commit_hash: str, the commit hash of the snapshot of the upstream project """ From 6173b11ba018b04c2ea45eab7877de91f0ceb5fc Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 9 Jul 2020 20:21:53 +0100 Subject: [PATCH 148/155] Fix typo: gitlabreview -> gitlabReview --- .../administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 index c01abb2ff7..68063cf4f9 100644 --- a/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 +++ b/deployment/administration/SRE_Upload_Git_Repo_to_GitlabReview.ps1 @@ -113,7 +113,7 @@ curl -X GET -o `$tmpdir/${zipFileName} "${remoteUrl}" chown -R gitlabdaemon:gitlabdaemon /tmp/zipfiles/ "@ Add-LogMessage -Level Info "[ ] Running remote script to download zipfile onto $($config.sre.webapps.gitlabreview.vmName)" -$result = Invoke-RemoteScript -Shell "UnixShell" -Script $script -VMName $config.sre.webapps.gitlabreview.vmName -ResourceGroupName $config.sre.webapps.rg +$result = Invoke-RemoteScript -Shell "UnixShell" -Script $script -VMName $config.sre.webapps.gitlabReview.vmName -ResourceGroupName $config.sre.webapps.rg Write-Output $result.Value From bb6ca858c171c8ea4b97e16f885366246b7a725e Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 9 Jul 2020 20:22:18 +0100 Subject: [PATCH 149/155] Check for correct (created) status code --- .../cloud_init/scripts/zipfile_to_gitlab_project.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index 325236955b..a70161ca17 
100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -88,7 +88,7 @@ def create_project(gitlab_config, repo_name, namespace_id): }, ) - if response.status_code != 200: + if response.status_code != 201: # created raise gl.http_error("Creating project", response) project_info = response.json() From cc4977bc6df1a2b5bb35ecb006a2658fafec9f20 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 9 Jul 2020 20:23:02 +0100 Subject: [PATCH 150/155] Inline clone, commit and push --- .../scripts/zipfile_to_gitlab_project.py | 155 +++++++----------- 1 file changed, 59 insertions(+), 96 deletions(-) diff --git a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py index a70161ca17..b53a9c6f02 100644 --- a/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py +++ b/deployment/secure_research_environment/cloud_init/scripts/zipfile_to_gitlab_project.py @@ -363,86 +363,6 @@ def create_merge_request_if_not_exists( raise gl.http_error(f"Creating merge request for {repo_name}", response) -def clone_commit_and_push( - path_to_unzipped_repo, - tmp_repo_dir, - branch_name, - target_branch_name, - remote_url, - target_project_url, - commit_hash, -): - """ - Run shell commands to convert the unzipped directory containing the - repository contents into a git repo, then commit it on the branch - with the requested name. 
- - Parameters - ========== - path_to_unzipped_repo: str, the full directory path to the unzipped repo - tmp_repo_dir: str, path to a temporary dir where we will clone the project - branch_name: str, the name of the branch holding the snapshot - target_branch_name: str, the name of the branch to push to - remote_url: str, the URL for this project on gitlab-review to be added - as a remote ("unapproved"). - target_project_url: str, the url of the original imported project on - gitlab-review ("approved") - commit_hash: str, the commit hash of the snapshot of the upstream project - """ - - # Ensure the cloned repo does not already exist (from an interrupted attempt) - subprocess.run(["rm", "-rf", "cloned_repo"], cwd=tmp_repo_dir, check=True) - - # Clone the repo - subprocess.run(["git", "clone", remote_url, "cloned_repo"], cwd=tmp_repo_dir, check=True) - working_dir = os.path.join(tmp_repo_dir, "cloned_repo") - assert os.path.exists(working_dir) - - # Add upstream (target repo) to this repo - subprocess.run( - ["git", "remote", "add", "approved", target_project_url], - cwd=working_dir, - check=True, - ) - subprocess.run(["git", "fetch", "approved"], cwd=working_dir, check=True) - - # Checkout the target branch if it exists - git_checkout_result = subprocess.run( - ["git", "checkout", target_branch_name], cwd=working_dir, check=False - ) - if git_checkout_result.returncode == 0: - subprocess.run(["git", "pull", "approved"], cwd=working_dir, check=True) - - # now checkout the branch holding the snapshot - subprocess.run(["git", "checkout", "-b", branch_name], cwd=working_dir, check=True) - - # Remove the contents of the cloned repo (everything except .git) - for item in os.listdir(working_dir): - if item != ".git": - subprocess.run(["rm", "-rf", item], cwd=working_dir, check=True) - - # Copy the unzipped repo contents into our cloned (empty) repo - for item in os.listdir(path_to_unzipped_repo): - subprocess.run( - ["cp", "-r", os.path.join(path_to_unzipped_repo, item), 
"."], - cwd=working_dir, - check=True, - ) - - # Commit everything to this branch, also putting commit hash into message - subprocess.run(["git", "add", "."], cwd=working_dir, check=True) - commit_msg = "Import snapshot of {} at commit {}".format(remote_url, commit_hash) - subprocess.run(["git", "commit", "-m", commit_msg], cwd=working_dir, check=True) - # Push back to gitlab review (unapproved) - subprocess.run( - ["git", "push", "-f", "--set-upstream", "origin", branch_name], - cwd=working_dir, - check=True, - ) - - logger.info("Pushed to %s, branch %s", remote_url, branch_name) - - def fork_project( gitlab_config, fork_namespace, @@ -527,11 +447,7 @@ def unzipped_snapshot_to_merge_request(gitlab_config, snapshot_details, namespac ) target_project_id = target_project_info["id"] target_project_url = target_project_info["ssh_url_to_repo"] - logger.info("Created project approved/%s ", repo_name) - - # Branch to create on the source (unapproved) repository of the - # matches that of the target - src_branch_name = f"commit-{commit_hash}" + logger.info("Created or found project at approved/%s", repo_name) # Fork this project to "unapproved" group src_project_info = fork_project( @@ -542,19 +458,66 @@ def unzipped_snapshot_to_merge_request(gitlab_config, snapshot_details, namespac fork_namespace_id=namespace_ids["unapproved"], ) src_project_id = src_project_info["id"] - remote_url = src_project_info["ssh_url_to_repo"] - logger.info("Fork of project at unapproved/%s", repo_name) + src_remote_url = src_project_info["ssh_url_to_repo"] + logger.info("Created or found fork of project at unapproved/%s", repo_name) - # Do the command-line git stuff to push to unapproved project - clone_commit_and_push( - unzipped_location, - snapshot_path, - src_branch_name, - target_branch_name, - remote_url, - target_project_url, - commit_hash, + # Ensure the cloned repo does not already exist (from an interrupted attempt) + subprocess.run(["rm", "-rf", "cloned_repo"], cwd=snapshot_path, 
check=True) + + # Clone the repo + subprocess.run(["git", "clone", src_remote_url, "cloned_repo"], cwd=snapshot_path, check=True) + working_dir = os.path.join(snapshot_path, "cloned_repo") + assert os.path.exists(working_dir) + logger.info("Created local working copy of %s", src_remote_url) + + # Add upstream (target repo) to this repo + subprocess.run( + ["git", "remote", "add", "approved", target_project_url], + cwd=working_dir, + check=True, ) + subprocess.run(["git", "fetch", "approved"], cwd=working_dir, check=True) + logger.info("Fetched branches from approved/%s", repo_name) + + # Checkout the target branch if it exists + git_checkout_result = subprocess.run( + ["git", "checkout", target_branch_name], cwd=working_dir, check=False + ) + if git_checkout_result.returncode == 0: + subprocess.run(["git", "pull", "approved"], cwd=working_dir, check=True) + + # Branch to create on the source (unapproved) repository of the + # matches that of the target + src_branch_name = f"commit-{commit_hash}" + + # now checkout the branch holding the snapshot + subprocess.run(["git", "checkout", "-b", src_branch_name], cwd=working_dir, check=True) + + # Remove the contents of the cloned repo (everything except .git) + for item in os.listdir(working_dir): + if item != ".git": + subprocess.run(["rm", "-rf", item], cwd=working_dir, check=True) + + # Copy the unzipped repo contents into our cloned (empty) repo + for item in os.listdir(unzipped_location): + subprocess.run( + ["cp", "-r", os.path.join(unzipped_location, item), "."], + cwd=working_dir, + check=True, + ) + + # Commit everything to this branch, also putting commit hash into message + subprocess.run(["git", "add", "."], cwd=working_dir, check=True) + commit_msg = "Import snapshot of {} at commit {}".format(src_remote_url, commit_hash) + subprocess.run(["git", "commit", "-m", commit_msg], cwd=working_dir, check=True) + # Push back to gitlab review (unapproved) + subprocess.run( + ["git", "push", "-f", "--set-upstream", 
"origin", src_branch_name], + cwd=working_dir, + check=True, + ) + + logger.info("Pushed to %s, branch %s", src_remote_url, src_branch_name) # Create the branch on the "approved" project if it doesn't already exist create_branch_if_not_exists( From 2ab4d54cf3cd857b8f1676e748866cafac921b40 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 9 Jul 2020 20:25:04 +0100 Subject: [PATCH 151/155] gitlabUsername -> gitlabUserIngressUsername for cloud-init substitution --- .../setup/Setup_SRE_WebApp_Servers.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 62ffe8501b..98cd66cbc2 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -176,7 +176,7 @@ $sshKeys = $result.Value[0].Message | Select-String "\[stdout\]\s*([\s\S]*?)\s*\ # Construct GitLab review cloudinit $gitlabReviewCloudInit = (Join-Path $PSScriptRoot ".." "cloud_init" "cloud-init-gitlab-review.template.yaml" | Get-Item | Get-Content -Raw). Replace('', $config.sre.webapps.gitlab.ip). - Replace('', $gitlabUsername). + Replace('', $gitlabUserIngressUsername). Replace('', $gitlabAPIToken). Replace('', "$($config.shm.dc.hostname).$($config.shm.domain.fqdn)"). Replace('', $ldapSearchUserDn). 
From 678be5b8fd2e711b29b9c754dda80a18e29268de Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 9 Jul 2020 20:25:56 +0100 Subject: [PATCH 152/155] Partial fix to NSG rules --- .../setup/Setup_SRE_WebApp_Servers.ps1 | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index 98cd66cbc2..af7270dcef 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -63,7 +63,9 @@ $subnetWebapps = Deploy-Subnet -Name $config.sre.network.vnet.subnets.data.name # Attach NSGs to subnets # ---------------------- $subnetAirlock = Set-SubnetNetworkSecurityGroup -Subnet $subnetAirlock -VirtualNetwork $vnet -NetworkSecurityGroup $nsgAirlock - +$nsgAirlock = Get-AzNetworkSecurityGroup -Name $config.sre.network.nsg.airlock.name -ResourceGroupName $config.sre.network.vnet.rg +$subnetWebapps = Set-SubnetNetworkSecurityGroup -Subnet $subnetWebapps -VirtualNetwork $vnet -NetworkSecurityGroup $nsgWebapps +$nsgWebapps = Get-AzNetworkSecurityGroup -Name $config.sre.webapps.nsg -ResourceGroupName $config.sre.network.vnet.rg # Create webapps resource group # -------------------------------- @@ -115,11 +117,13 @@ $gitlabDeploymentParams = @{ try { Add-LogMessage -Level Warning "Temporarily allowing outbound internet access from $($config.sre.webapps.gitlab.ip)..." 
# Note that this has no effect at present Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps -Name "TmpAllowOutboundInternetGitlab" -SourceAddressPrefix $config.sre.webapps.gitlab.ip -Access Allow -Description "Allow outbound internet" -DestinationAddressPrefix Internet -DestinationPortRange * -Direction Outbound -Priority 100 -Protocol * -SourcePortRange * + $nsgWebapps = Get-AzNetworkSecurityGroup -Name $config.sre.webapps.nsg -ResourceGroupName $config.sre.network.vnet.rg + $null = Deploy-UbuntuVirtualMachine @gitlabDeploymentParams @commonDeploymentParams Add-VmToNSG -VMName $config.sre.webapps.gitlab.vmName -NSGName $config.sre.webapps.nsg -VmResourceGroupName $config.sre.webapps.rg -NsgResourceGroupName $config.sre.network.vnet.rg Enable-AzVM -Name $config.sre.webapps.gitlab.vmName -ResourceGroupName $config.sre.webapps.rg } finally { - $null = Remove-AzNetworkSecurityRuleConfig -Name "TmpAllowOutboundInternetGitlab" -NetworkSecurityGroup $nsgWebapps + $nsgWebapps = Remove-AzNetworkSecurityRuleConfig -Name "TmpAllowOutboundInternetGitlab" -NetworkSecurityGroup $nsgWebapps } @@ -152,11 +156,12 @@ $hackmdDeploymentParams = @{ try { Add-LogMessage -Level Warning "Temporarily allowing outbound internet access from $($config.sre.webapps.hackmd.ip)..." 
# Note that this has no effect at present Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgWebapps -Name "TmpAllowOutboundInternetHackMD" -SourceAddressPrefix $config.sre.webapps.hackmd.ip -Access Allow -Description "Allow outbound internet" -DestinationAddressPrefix Internet -DestinationPortRange * -Direction Outbound -Priority 100 -Protocol * -SourcePortRange * + $nsgWebapps = Get-AzNetworkSecurityGroup -Name $config.sre.webapps.nsg -ResourceGroupName $config.sre.network.vnet.rg $null = Deploy-UbuntuVirtualMachine @hackmdDeploymentParams @commonDeploymentParams Add-VmToNSG -VMName $config.sre.webapps.hackmd.vmName -NSGName $config.sre.webapps.nsg -VmResourceGroupName $config.sre.webapps.rg -NsgResourceGroupName $config.sre.network.vnet.rg Enable-AzVM -Name $config.sre.webapps.hackmd.vmName -ResourceGroupName $config.sre.webapps.rg } finally { - $null = Remove-AzNetworkSecurityRuleConfig -Name "TmpAllowOutboundInternetHackMD" -NetworkSecurityGroup $nsgWebapps + $nsgWebapps = Remove-AzNetworkSecurityRuleConfig -Name "TmpAllowOutboundInternetHackMD" -NetworkSecurityGroup $nsgWebapps } @@ -218,10 +223,11 @@ $gitlabReviewDeploymentParams = @{ try { Add-LogMessage -Level Warning "Temporarily allowing outbound internet access from $($config.sre.webapps.gitlabReview.ip)..." 
Add-NetworkSecurityGroupRule -NetworkSecurityGroup $nsgAirlock -Name "TmpAllowOutboundInternetGitlabReview" -SourceAddressPrefix $config.sre.webapps.gitlabReview.ip -Access Allow -Description "Allow outbound internet" -DestinationAddressPrefix Internet -DestinationPortRange * -Direction Outbound -Priority 100 -Protocol * -SourcePortRange * + $nsgAirlock = Get-AzNetworkSecurityGroup -Name $config.sre.network.nsg.airlock.name -ResourceGroupName $config.sre.network.vnet.rg $null = Deploy-UbuntuVirtualMachine @gitlabReviewDeploymentParams @commonDeploymentParams Enable-AzVM -Name $config.sre.webapps.gitlabReview.vmName -ResourceGroupName $config.sre.webapps.rg } finally { - $null = Remove-AzNetworkSecurityRuleConfig -Name "TmpAllowOutboundInternetGitlabReview" -NetworkSecurityGroup $nsgAirlock + $nsgAirlock = Remove-AzNetworkSecurityRuleConfig -Name "TmpAllowOutboundInternetGitlabReview" -NetworkSecurityGroup $nsgAirlock } From 6e7d2a1c3586f703434d3d2f4c621ae6b7dea118 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 16 Jul 2020 08:53:12 +0100 Subject: [PATCH 153/155] Rename NSG rules template; adjust rules - Add rules for outbound connections identity server - Remove inbound RDP rule - Add inbound rule for session host --- ...son => sre-webapp-nsg-rules-template.json} | 40 +++++++++++++++++-- .../setup/Setup_SRE_WebApp_Servers.ps1 | 2 +- 2 files changed, 37 insertions(+), 5 deletions(-) rename deployment/secure_research_environment/arm_templates/{sre-nsg-rules-template.json => sre-webapp-nsg-rules-template.json} (84%) diff --git a/deployment/secure_research_environment/arm_templates/sre-nsg-rules-template.json b/deployment/secure_research_environment/arm_templates/sre-webapp-nsg-rules-template.json similarity index 84% rename from deployment/secure_research_environment/arm_templates/sre-nsg-rules-template.json rename to deployment/secure_research_environment/arm_templates/sre-webapp-nsg-rules-template.json index 7c53fff186..e1004f068b 100755 --- 
a/deployment/secure_research_environment/arm_templates/sre-nsg-rules-template.json
+++ b/deployment/secure_research_environment/arm_templates/sre-webapp-nsg-rules-template.json
@@ -109,6 +109,22 @@
         "sourcePortRange": "*"
       }
     },
+    {
+      "type": "Microsoft.Network/networkSecurityGroups/securityRules",
+      "apiVersion": "2020-04-01",
+      "name": "[concat(parameters('nsgWebappsName'), '/AllowOutboundIdentityServer')]",
+      "properties": {
+        "access": "Allow",
+        "description": "Allow outbound connections to the identity server",
+        "destinationAddressPrefix": "10.0.0.0/24",
+        "destinationPortRange": "*",
+        "direction": "Outbound",
+        "priority": 3000,
+        "protocol": "*",
+        "sourceAddressPrefix": "*",
+        "sourcePortRange": "*"
+      }
+    },
     {
       "type": "Microsoft.Network/networkSecurityGroups/securityRules",
       "apiVersion": "2020-04-01",
@@ -144,12 +160,12 @@
     {
       "type": "Microsoft.Network/networkSecurityGroups/securityRules",
       "apiVersion": "2020-04-01",
-      "name": "[concat(parameters('nsgAirlockName'), '/AllowInboundRdpSessionHostReview')]",
+      "name": "[concat(parameters('nsgAirlockName'), '/AllowInboundHttpSessionHostReview')]",
       "properties": {
         "access": "Allow",
-        "description": "Allow inbound RDP connections from GitLab review VM",
+        "description": "Allow inbound http(s) from application session host",
         "destinationAddressPrefix": "VirtualNetwork",
-        "destinationPortRanges": ["3389"],
+        "destinationPortRanges": ["80", "443"],
         "direction": "Inbound",
         "priority": 2000,
         "protocol": "Tcp",
@@ -173,6 +189,22 @@
         "sourcePortRange": "*"
       }
     },
+    {
+      "type": "Microsoft.Network/networkSecurityGroups/securityRules",
+      "apiVersion": "2020-04-01",
+      "name": "[concat(parameters('nsgAirlockName'), '/AllowOutboundIdentityServer')]",
+      "properties": {
+        "access": "Allow",
+        "description": "Allow outbound connections to the identity server",
+        "destinationAddressPrefix": "10.0.0.0/24",
+        "destinationPortRange": "*",
+        "direction": "Outbound",
+        "priority": 3000,
+        "protocol": "*",
+        "sourceAddressPrefix": "*",
+        
"sourcePortRange": "*" + } + }, { "type": "Microsoft.Network/networkSecurityGroups/securityRules", "apiVersion": "2020-04-01", @@ -206,4 +238,4 @@ } } ] -} \ No newline at end of file +} diff --git a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 index af7270dcef..2edee7f7b6 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_WebApp_Servers.ps1 @@ -50,7 +50,7 @@ $params = @{ subnetComputeCidr = $config.sre.network.vnet.subnets.data.cidr subnetVpnCidr = "172.16.201.0/24" # TODO fix this when it is no longer hard-coded } -Deploy-ArmTemplate -TemplatePath (Join-Path $PSScriptRoot ".." "arm_templates" "sre-nsg-rules-template.json") -Params $params -ResourceGroupName $config.sre.network.vnet.rg +Deploy-ArmTemplate -TemplatePath (Join-Path $PSScriptRoot ".." "arm_templates" "sre-webapp-nsg-rules-template.json") -Params $params -ResourceGroupName $config.sre.network.vnet.rg # Check that VNET and subnets exist From 3462ba62829b1e1ad87815f72253ce2473958042 Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Thu, 16 Jul 2020 08:59:41 +0100 Subject: [PATCH 154/155] Return PSNetworkSecurityGroup object from Add-NetworkSecurityGroupRule --- deployment/common/Deployments.psm1 | 48 +++++++++++++++--------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/deployment/common/Deployments.psm1 b/deployment/common/Deployments.psm1 index 9d81d0e363..4f70ed234b 100644 --- a/deployment/common/Deployments.psm1 +++ b/deployment/common/Deployments.psm1 @@ -33,20 +33,20 @@ function Add-NetworkSecurityGroupRule { ) try { if ($VerboseLogging) { Add-LogMessage -Level Info "Ensuring that NSG rule '$Name' exists on '$($NetworkSecurityGroup.Name)'..." 
} - $null = Get-AzNetworkSecurityRuleConfig -Name $Name -NetworkSecurityGroup $NetworkSecurityGroup -ErrorVariable notExists -ErrorAction SilentlyContinue + $rule = Get-AzNetworkSecurityRuleConfig -Name $Name -NetworkSecurityGroup $NetworkSecurityGroup -ErrorVariable notExists -ErrorAction SilentlyContinue if ($notExists) { if ($VerboseLogging) { Add-LogMessage -Level Info "[ ] Creating NSG rule '$Name'" } - $null = Add-AzNetworkSecurityRuleConfig -Name "$Name" ` - -Access "$Access" ` - -Description "$Description" ` - -DestinationAddressPrefix $DestinationAddressPrefix ` - -DestinationPortRange $DestinationPortRange ` - -Direction "$Direction" ` - -NetworkSecurityGroup $NetworkSecurityGroup ` - -Priority $Priority ` - -Protocol "$Protocol" ` - -SourceAddressPrefix $SourceAddressPrefix ` - -SourcePortRange $SourcePortRange | Set-AzNetworkSecurityGroup -ErrorAction Stop + $nsg = Add-AzNetworkSecurityRuleConfig -Name "$Name" ` + -Access "$Access" ` + -Description "$Description" ` + -DestinationAddressPrefix $DestinationAddressPrefix ` + -DestinationPortRange $DestinationPortRange ` + -Direction "$Direction" ` + -NetworkSecurityGroup $NetworkSecurityGroup ` + -Priority $Priority ` + -Protocol "$Protocol" ` + -SourceAddressPrefix $SourceAddressPrefix ` + -SourcePortRange $SourcePortRange | Set-AzNetworkSecurityGroup -ErrorAction Stop if ($?) 
{ if ($VerboseLogging) { Add-LogMessage -Level Success "Created NSG rule '$Name'" } } else { @@ -54,22 +54,22 @@ function Add-NetworkSecurityGroupRule { } } else { if ($VerboseLogging) { Add-LogMessage -Level InfoSuccess "Updating NSG rule '$Name'" } - $null = Set-AzNetworkSecurityRuleConfig -Name "$Name" ` - -Access "$Access" ` - -Description "$Description" ` - -DestinationAddressPrefix $DestinationAddressPrefix ` - -DestinationPortRange $DestinationPortRange ` - -Direction "$Direction" ` - -NetworkSecurityGroup $NetworkSecurityGroup ` - -Priority $Priority ` - -Protocol "$Protocol" ` - -SourceAddressPrefix $SourceAddressPrefix ` - -SourcePortRange $SourcePortRange | Set-AzNetworkSecurityGroup -ErrorAction Stop + $nsg = Set-AzNetworkSecurityRuleConfig -Name "$Name" ` + -Access "$Access" ` + -Description "$Description" ` + -DestinationAddressPrefix $DestinationAddressPrefix ` + -DestinationPortRange $DestinationPortRange ` + -Direction "$Direction" ` + -NetworkSecurityGroup $NetworkSecurityGroup ` + -Priority $Priority ` + -Protocol "$Protocol" ` + -SourceAddressPrefix $SourceAddressPrefix ` + -SourcePortRange $SourcePortRange | Set-AzNetworkSecurityGroup -ErrorAction Stop } } catch [Microsoft.Azure.Commands.Network.Common.NetworkCloudException] { Add-LogMessage -Level Fatal $_.Exception.Message.Split("`n")[0] } - + return $nsg } Export-ModuleMember -Function Add-NetworkSecurityGroupRule From 53f8e635444c24ed148fd455775dc315ac6af72b Mon Sep 17 00:00:00 2001 From: Oliver Strickson Date: Wed, 22 Jul 2020 09:08:38 +0100 Subject: [PATCH 155/155] Add back rule permitting inbound RDP connections to the webapp NSG The absence of this rule was blocking RDP connections between the APP session host and the DSVM, since this NSG is applied to the DATA subnet. There is almost certainly something more fundamentally wrong, but this patch means that if this branch is deployed (e.g. in a test environment), it won't cause this issue. 
--- .../sre-webapp-nsg-rules-template.json | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/deployment/secure_research_environment/arm_templates/sre-webapp-nsg-rules-template.json b/deployment/secure_research_environment/arm_templates/sre-webapp-nsg-rules-template.json index e1004f068b..8001213c16 100755 --- a/deployment/secure_research_environment/arm_templates/sre-webapp-nsg-rules-template.json +++ b/deployment/secure_research_environment/arm_templates/sre-webapp-nsg-rules-template.json @@ -157,6 +157,22 @@ "sourcePortRange": "*" } }, + { + "type": "Microsoft.Network/networkSecurityGroups/securityRules", + "apiVersion": "2020-04-01", + "name": "[concat(parameters('nsgAirlockName'), '/AllowInboundRdpSessionHostReview')]", + "properties": { + "access": "Allow", + "description": "Allow inbound RDP from application session host", + "destinationAddressPrefix": "VirtualNetwork", + "destinationPortRanges": ["3389"], + "direction": "Inbound", + "priority": 1900, + "protocol": "Tcp", + "sourceAddressPrefix": "[parameters('ipAddressSessionHostReview')]", + "sourcePortRange": "*" + } + }, { "type": "Microsoft.Network/networkSecurityGroups/securityRules", "apiVersion": "2020-04-01",