Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: added support for GCS for image exporter plugin #4625

Merged
merged 12 commits into from
Mar 12, 2024
3 changes: 3 additions & 0 deletions scripts/sql/226_image_exporter_gcp_support.down.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
-- Revert GCP support for the "Container Image Exporter v1.0.0" plugin:
-- drop the two GCP-only input variables added by the up migration and
-- restore the pre-GCP CloudProvider description.
-- The deletes are scoped to this plugin's step (mirroring the subquery the
-- up migration uses for the inserts) so that variables with the same name
-- belonging to other plugins are left untouched.
DELETE FROM plugin_step_variable
WHERE name = 'GcpServiceAccountJson'
  AND plugin_step_id = (SELECT ps.id FROM plugin_metadata p INNER JOIN plugin_step ps ON ps.plugin_id = p.id WHERE p.name = 'Container Image Exporter v1.0.0' AND ps."index" = 1 AND ps.deleted = false);
DELETE FROM plugin_step_variable
WHERE name = 'GcpProjectName'
  AND plugin_step_id = (SELECT ps.id FROM plugin_metadata p INNER JOIN plugin_step ps ON ps.plugin_id = p.id WHERE p.name = 'Container Image Exporter v1.0.0' AND ps."index" = 1 AND ps.deleted = false);
-- Intentionally global (matches the up migration's equally-global UPDATE).
UPDATE plugin_step_variable SET description='Provide which cloud storage provider you want to use: "aws" for Amazon S3 or "azure" for Azure Blob Storage' WHERE name='CloudProvider';
81 changes: 81 additions & 0 deletions scripts/sql/226_image_exporter_gcp_support.up.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
-- Replace the step script of "Container Image Exporter v1.0.0" to add GCP
-- (Google Cloud Storage) support alongside the existing AWS S3 and Azure
-- Blob Storage paths. The bash script is stored as an E'' string literal,
-- so single quotes inside it are escaped as \'.
-- Fixes vs. the previous revision of this migration:
--   * removed a stray `arch` command left over from debugging — under
--     `set -eo pipefail` it aborts the step on images where `arch` is not
--     installed, and its output was unused (`uname -m` is already captured)
--   * the base64 service-account JSON is now decoded directly from the
--     variable instead of being round-tripped through output.txt, so the
--     encoded credentials are not left lying around in the workspace
UPDATE plugin_pipeline_script SET script=E'#!/bin/bash
set -eo pipefail
if [[ $GcpServiceAccountJson ]]
then
echo $GcpServiceAccountJson | base64 -d > gcloud.json
fi
architecture=$(uname -m)
export platform=$(echo $CI_CD_EVENT | jq --raw-output .commonWorkflowRequest.ciBuildConfig.dockerBuildConfig.targetPlatform)
echo $platform
if [[ $platform == "linux/arm64,linux/amd64" ]] ; then
platform=$Platform
elif [[ $platform == "linux/arm64" ]]
then
platform="arm64"
elif [[ $platform == "linux/amd64" ]]
then
platform="amd64"
else
if [[ $architecture == "x86_64" ]]
then
platform="amd64"
else
platform="arm64"
fi
fi
echo $platform
CloudProvider=$(echo "$CloudProvider" | awk \'{print tolower($0)}\')
current_timestamp=$(date +%s)
if [[ -z $FilePrefix ]]
then
file=$ContainerImage.tar
file=$(echo $file | tr \'/\' \'_\')
else
file=$FilePrefix-$ContainerImage.tar
file=$(echo $file | tr \'/\' \'_\')
fi
echo $file
future_timestamp=$((current_timestamp + $Expiry * 60))
future_date=$(date -u -d@"$future_timestamp" +"%Y-%m-%dT%H:%M:%SZ")
aws_secs=$(($Expiry * 60))
gcp_secs="${Expiry}m"
docker pull --platform linux/$platform $ContainerImage
docker save $ContainerImage > $file
ls
if [ $CloudProvider == "azure" ]
then
docker run --network=host --rm -v $(pwd):/data mcr.microsoft.com/azure-cli /bin/bash -c " az storage blob upload --account-name $AzureAccountName --account-key $AzureAccountKey --container-name $BucketName --name $file --file data/$file"
echo "docker run --rm mcr.microsoft.com/azure-cli /bin/bash -c " az storage blob generate-sas --account-name $AzureAccountName --account-key $AzureAccountKey --container-name $BucketName --name $file --permissions r --expiry $future_date""
sas_token=$(docker run --network=host --rm mcr.microsoft.com/azure-cli /bin/bash -c " az storage blob generate-sas --account-name $AzureAccountName --account-key $AzureAccountKey --container-name $BucketName --name $file --permissions r --expiry $future_date")
token=$sas_token
echo $token
token=$(echo $sas_token| tr -d \'"\')
echo $token
link=https://$AzureAccountName.blob.core.windows.net/$BucketName/$file?$token
fi
if [ $CloudProvider == "aws" ]
then
echo "aws command"
docker run --network=host --rm -v $(pwd):/data -e AWS_ACCESS_KEY_ID=$AwsAccessKey -e AWS_SECRET_ACCESS_KEY=$AwsSecretKey public.ecr.aws/aws-cli/aws-cli:latest s3 cp /data/$file s3://$BucketName --region $AwsRegion
link=$(docker run --network=host --rm -v $(pwd):/data -e AWS_ACCESS_KEY_ID=$AwsAccessKey -e AWS_SECRET_ACCESS_KEY=$AwsSecretKey public.ecr.aws/aws-cli/aws-cli:latest s3 presign s3://$BucketName/$file --region $AwsRegion --expires-in $aws_secs )
fi
if [ $CloudProvider == "gcp" ]
then
echo "gcp command"
docker run --network=host --rm -v $(pwd):/data quay.io/devtron/test:69a6cb4fb76e /bin/bash -c "gcloud auth activate-service-account --key-file=data/gcloud.json;gcloud config set project $GcpProjectName; gcloud storage ls;gsutil cp data/$file gs://$BucketName/ ; gcloud storage ls gs://$BucketName/;"
link=$(docker run --network=host --rm -v $(pwd):/data quay.io/devtron/test:69a6cb4fb76e /bin/bash -c "gcloud auth activate-service-account --key-file=data/gcloud.json;gcloud config set project $GcpProjectName; gsutil signurl -d $gcp_secs data/gcloud.json gs://$BucketName/$file "| awk \'{print $NF}\' )
fi
echo "***Copy the below link to download the tar file***"
echo $link
' WHERE id=(select script_id from plugin_step where id=(SELECT ps.id FROM plugin_metadata p inner JOIN plugin_step ps on ps.plugin_id=p.id WHERE p.name='Container Image Exporter v1.0.0' and ps."index"=1 and ps.deleted=false));


-- Expose the two GCP-specific inputs on step 1 of "Container Image Exporter v1.0.0".
-- Both allow empty values so non-GCP users can leave them blank.
INSERT INTO "plugin_step_variable" ("id", "plugin_step_id", "name", "format", "description", "is_exposed", "allow_empty_value", "variable_type", "value_type", "variable_step_index", "deleted", "created_on", "created_by", "updated_on", "updated_by")
VALUES
    (
        nextval('id_seq_plugin_step_variable'),
        (SELECT ps.id FROM plugin_metadata p INNER JOIN plugin_step ps ON ps.plugin_id = p.id WHERE p.name = 'Container Image Exporter v1.0.0' AND ps."index" = 1 AND ps.deleted = false),
        'GcpServiceAccountJson',
        'STRING',
        'Provide Google cloud service account json creds in base64 format/Use scoped variables',
        true, true, 'INPUT', 'NEW', 1, 'f', 'now()', 1, 'now()', 1
    ),
    (
        nextval('id_seq_plugin_step_variable'),
        (SELECT ps.id FROM plugin_metadata p INNER JOIN plugin_step ps ON ps.plugin_id = p.id WHERE p.name = 'Container Image Exporter v1.0.0' AND ps."index" = 1 AND ps.deleted = false),
        'GcpProjectName',
        'STRING',
        'Specify Google Account Project Name',
        true, true, 'INPUT', 'NEW', 1, 'f', 'now()', 1, 'now()', 1
    );

-- Mention "gcp" as a valid choice in the CloudProvider help text.
UPDATE plugin_step_variable
SET description = 'Provide which cloud storage provider you want to use: "aws" for Amazon S3 or "azure" for Azure Blob Storage or "gcp" for Google Cloud Storage'
WHERE name = 'CloudProvider';

-- Refresh the FilePrefix help text.
UPDATE plugin_step_variable
SET description = 'If you would like to add a prefix to the exported image files name, enter it here.'
WHERE name = 'FilePrefix';
Loading