diff --git a/MANIFEST.in b/MANIFEST.in index 2645a8232..ff0578993 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -8,3 +8,4 @@ include functest_requirements.txt include test_requirements.txt include unittest_requirements.txt recursive-exclude pulp_file/tests/fixtures/ * +recursive-include pulp_file/tests/functional/api/from_pulpcore/artifacts * diff --git a/pulp_file/tests/functional/api/from_pulpcore/__init__.py b/pulp_file/tests/functional/api/from_pulpcore/__init__.py new file mode 100644 index 000000000..cfeede4f8 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/__init__.py @@ -0,0 +1 @@ +"""Tests for core functionality that require plugin involvement to exercise.""" diff --git a/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/certificates/ca.pem b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/certificates/ca.pem new file mode 100644 index 000000000..b85f33b05 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/certificates/ca.pem @@ -0,0 +1,17 @@ +-----BEGIN CERTIFICATE----- +MIICoDCCAYgCCQC2c2uY34HNlzANBgkqhkiG9w0BAQUFADASMRAwDgYDVQQDDAdn +b3ZlZ2FuMB4XDTE5MDMxMzIxMDMzMFoXDTM4MDYxNjIxMDMzMFowEjEQMA4GA1UE +AwwHZ292ZWdhbjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANEatWsZ +1iwGmTxD02dxMI4ci+Au4FzvmWLBWD07H5GGTVFwnqmNOKhP6DHs1EsMZevkUvaG +CRxZlPYhjNFLZr2c2FnoDZ5nBXlSW6sodXURbMfyT187nDeBXVYFuh4T2eNCatnm +t3vgdi+pWsF0LbOgpu7GJI2sh5K1imxyB77tJ7PFTDZCSohkK+A+0nDCnJqDUNXD +5CK8iaBciCbnzp3nRKuM2EmgXno9Repy/HYxIgB7ZodPwDvYNjMGfvs0s9mJIKmc +CKgkPXVO9y9gaRrrytICcPOs+YoU/PN4Ttg6wzxaWvJgw44vsR8wM/0i4HlXfBdl +9br+cgn8jukDOgECAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAyNHV6NA+0GfUrvBq +AHXHNnBE3nzMhGPhF/0B/dO4o0n6pgGZyzRxaUaoo6+5oQnBf/2NmDyLWdalFWX7 +D1WBaxkhK+FU922+qwQKhABlwMxGCnfZ8F+rlk4lNotm3fP4wHbnO1SGIDvvZFt/ +mpMgkhwL4lShUFv57YylXr+D2vSFcAryKiVGk1X3sHMXlFAMLHUm3d97fJnmb1qQ +wC43BlJCBQF98wKtYNwTUG/9gblfk8lCB2DL1hwmPy3q9KbSDOdUK3HW6a75ZzCD +6mXc/Y0bJcwweDsywbPBYP13hYUcpw4htcU6hg6DsoAjLNkSrlY+GGo7htx+L9HH +IwtfRg== 
+-----END CERTIFICATE----- diff --git a/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/certificates/client.pem b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/certificates/client.pem new file mode 100644 index 000000000..dde1f4801 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/certificates/client.pem @@ -0,0 +1,17 @@ +-----BEGIN CERTIFICATE----- +MIICqjCCAZICAgtCMA0GCSqGSIb3DQEBBQUAMBIxEDAOBgNVBAMMB2dvdmVnYW4w +HhcNMTkwMzEzMjEwMzMwWhcNMjkwMzEwMjEwMzMwWjAjMRAwDgYDVQQDDAdnb3Zl +Z2FuMQ8wDQYDVQQKDAZjbGllbnQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQCxJWx5t25jY4womWtKxGqv2LHg9YnU0b2VCECLhu5JjoAzFPja5VHB0Maz +G8m5c0+N2ubrPcBC+KdoGMd2MqrrGyzKOiwbVDW0YOgnFqh58p796iKtVboWx41y +Gzn289PzYccxH6mhhPmRVD25KyV1TenqvGIHJTepF7mgIemGDTv+j7+mYPT/3r6I +pnwTkEVPr+Q4iW0l3fNESlFFRt2b7yhz9f0E4SMhmIRnSIGOLO1zE02IJ1hTuGkx +/MZ1AqQdVSdm4jenTIMp91R1kYylI66yMcpU6w6x4j8qvJ8nBZ4r4DqOHcOobyHp +qlaJjv/K5SGJxV2k0EFk7b483lbrAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBALYh +SHLGJCxVL8ePFLs294fhTq4pTQsvHm8q3SyJD9DaB+HKTceCFErNv18Dsl/QwBis +WPHWKpDN0EUcuuE/8oUaGjjzByJ8bPafMicFCHSSefcJw+IOOqKBkWDT+4YGkvfs +RpwxSLqLOhEt7aSkiPcMvD20v8cvj0O36c5G3Vv0E8WmPWOEqjyPFoU9X1vACr+h +DdIKvxFbvRU9ObektFxOYHuvP010IBv2dGyw3G5W5fh9A5OSXHAShWSwkRU36oft +ugB47fIIlb7zLm4GBmxGG0yBwAf4otBlUXVNqNx15bbUuVgKbGMFfItQgEo9AQcz +gGsetwDOs/NgZ95oH40= +-----END CERTIFICATE----- diff --git a/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/certificates/server.pem b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/certificates/server.pem new file mode 100644 index 000000000..1adab8427 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/certificates/server.pem @@ -0,0 +1,17 @@ +-----BEGIN CERTIFICATE----- +MIICqjCCAZICAlmVMA0GCSqGSIb3DQEBBQUAMBIxEDAOBgNVBAMMB2dvdmVnYW4w +HhcNMTkwMzEzMjEwMzMwWhcNMzgwNjE2MjEwMzMwWjAjMRAwDgYDVQQDDAdnb3Zl +Z2FuMQ8wDQYDVQQKDAZzZXJ2ZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK 
+AoIBAQDJpzfiNGOBKdfaETbhbu0F+Y9a+trZ0ZHJCzMS/lClPjsaz0Mpc8eoosry +xnVklBPsL+I2dLp591aho16iKnGFHNpzPwgSbQGpMklSj51JtweMq4auEyqYdDrL +Yhy7EEldIFSQcJ06kZljosnwcKjbZbfFGhy96KRs2VuLo2XAvIw1IOdzc7frpinM +vBkc1eZwHyEVa7N+yXO9hFXjiS3hlVlOc8xS+9vK3Ck8PrfFcyO0mV/ichu4mIu9 +KIN+NX4SZXMBrTpxWBdHATX++GDRNLOTnyY53zWk5K424+ApZCfzkt9DupvwXpYb +6IznmzNErcE8JzPYFh7fpD9a3F/DAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBADvu +Up7+Oj23MWTfbu9q1pwqoRtAdsMmvh2e45kKO8NRqgNWLtLannzE9nkJatZg/INj +hg2n7plQ6vtjnpj4YC81C7JJvPpckq6221tStdX4M+fMSItG9VPmUxhjIMJMB28r +Fat6UPqjRXWeGQmKGcFLo5uLf+r86jspyX/TfteQtR1xibACHWSm+7cwG6DhTWCE +7imYrR+zc70V6pPDHp7HNJ63CUT17NA4tV0yzrASv2tqsjGZ63yn3LP+V6+gbhlm +osATPQilDyWoJHdG1Gp/kgfqipmEsr6N7PrJ/eZm1MQ7/pAfmtK8Wl87XrZN4SFe +TfvUdpZAtadl79PGmCk= +-----END CERTIFICATE----- diff --git a/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/certificates/un_urlencoded_cert.txt b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/certificates/un_urlencoded_cert.txt new file mode 100644 index 000000000..b4c415918 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/certificates/un_urlencoded_cert.txt @@ -0,0 +1 @@ +-----BEGIN CERTIFICATE----- MIICqjCCAZICAgtCMA0GCSqGSIb3DQEBBQUAMBIxEDAOBgNVBAMMB2dvdmVnYW4w HhcNMTkwMzEzMjEwMzMwWhcNMjkwMzEwMjEwMzMwWjAjMRAwDgYDVQQDDAdnb3Zl Z2FuMQ8wDQYDVQQKDAZjbGllbnQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK AoIBAQCxJWx5t25jY4womWtKxGqv2LHg9YnU0b2VCECLhu5JjoAzFPja5VHB0Maz G8m5c0+N2ubrPcBC+KdoGMd2MqrrGyzKOiwbVDW0YOgnFqh58p796iKtVboWx41y Gzn289PzYccxH6mhhPmRVD25KyV1TenqvGIHJTepF7mgIemGDTv+j7+mYPT/3r6I pnwTkEVPr+Q4iW0l3fNESlFFRt2b7yhz9f0E4SMhmIRnSIGOLO1zE02IJ1hTuGkx /MZ1AqQdVSdm4jenTIMp91R1kYylI66yMcpU6w6x4j8qvJ8nBZ4r4DqOHcOobyHp qlaJjv/K5SGJxV2k0EFk7b483lbrAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBALYh SHLGJCxVL8ePFLs294fhTq4pTQsvHm8q3SyJD9DaB+HKTceCFErNv18Dsl/QwBis WPHWKpDN0EUcuuE/8oUaGjjzByJ8bPafMicFCHSSefcJw+IOOqKBkWDT+4YGkvfs RpwxSLqLOhEt7aSkiPcMvD20v8cvj0O36c5G3Vv0E8WmPWOEqjyPFoU9X1vACr+h 
DdIKvxFbvRU9ObektFxOYHuvP010IBv2dGyw3G5W5fh9A5OSXHAShWSwkRU36oft ugB47fIIlb7zLm4GBmxGG0yBwAf4otBlUXVNqNx15bbUuVgKbGMFfItQgEo9AQcz gGsetwDOs/NgZ95oH40= -----END CERTIFICATE----- \ No newline at end of file diff --git a/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/certificates/untrusted_client.pem b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/certificates/untrusted_client.pem new file mode 100644 index 000000000..29011d82b --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/certificates/untrusted_client.pem @@ -0,0 +1,18 @@ +-----BEGIN CERTIFICATE----- +MIIC8jCCAdoCAgOtMA0GCSqGSIb3DQEBBQUAMDYxNDAyBgNVBAMMK3B1bHAzLXNv +dXJjZS1mZWRvcmEzMS5sb2NhbGhvc3QuZXhhbXBsZS5jb20wHhcNMjAwNDAzMTky +OTI5WhcNMzAwNDAxMTkyOTI5WjBHMTQwMgYDVQQDDCtwdWxwMy1zb3VyY2UtZmVk +b3JhMzEubG9jYWxob3N0LmV4YW1wbGUuY29tMQ8wDQYDVQQKDAZjbGllbnQwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC+/beGWsHQJYbFV86jmi/FJOCe ++7+myDYWEZFFMu7IPvkoSQPC9OJh/U6IOhmXWqoIMENh118FzLYB9bAmZt/gbMtc +4i6XKzHfgiHEGfM/s+CdqC0qNrxelydwt3ceBOh71chbalsALnAthwSHMn5VWOz1 +ym89LSuf0jlqchOqw6Vf/8t2PIhRagM13B0pcFeqCNv/jv6jwzgN7WuI1hhC9EAT +KLm05Y0JDPXBb/Gq97X66PhDOoXso0i5yjHj7/KsFY1W4C8vY68l3epybJ7LJ7Kf ++tV2YX27LLx8gagdadUDopUqGV3nFDjoY6+MtVRp4JU9h3/8BZJzUQdihM7LAgMB +AAEwDQYJKoZIhvcNAQEFBQADggEBAL9tT6vbbuycVyR3eBdzlZwMYMV++44EjYIp +daRXTQRzrGJ6A9hNLkE38k1gM+vTL62UOFXA+xALuXXfxx7BswGg9Au0bCyODa8K +LhuHAILVcFaRpW5HxQ56+oSGdt4tyKGwuZeBmfx0fWzP1RPML2WCFfHT4yPXz/+s +c00/8fJhWt+W6lmc95l2UC2+PSrZSAgqCMpjdg3gTzP3v9d7N55UxTJ5TEtTK5X1 +HKrez9JYYkm8ec72xboOKljdfRF7s1lcIMQ7PLFrS7fQ1xVZTFmYdQnAITypPztM +67Lxjxi0iGVQD3I+KxqzOj3AvgmO09H/QdNIbxJLiI/XyWTUFuc= +-----END CERTIFICATE----- diff --git a/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/genall.sh b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/genall.sh new file mode 100755 index 000000000..d01ba1660 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/genall.sh @@ -0,0 +1,3 @@ 
+./genca.sh +./gensrv.sh +./genclient.sh diff --git a/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/genca.sh b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/genca.sh new file mode 100755 index 000000000..730ee5705 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/genca.sh @@ -0,0 +1,46 @@ +HOSTNAME=`hostname` + +EXT_FILE=$1 +EXT=$2 + +mkdir -p keys +mkdir -p certificates + +# create CA key +openssl genrsa -out keys/ca.pem 2048 &> /dev/null + +# create the CA signing request. +openssl req \ + -new \ + -sha1 \ + -days 7035 \ + -key keys/ca.pem \ + -out ca.req \ + -subj "/CN=$HOSTNAME" + +# create the CA certificate +if [[ ! -z "$EXT_FILE" ]]; +then + echo "using: $EXT_FILE" + openssl x509 \ + -req \ + -days 7035 \ + -sha1 \ + -extfile $EXT_FILE \ + -extensions $EXT \ + -signkey keys/ca.pem \ + -in ca.req \ + -out certificates/ca.pem &> /dev/null +else + openssl x509 \ + -req \ + -days 7035 \ + -sha1 \ + -extensions v3_ca \ + -signkey keys/ca.pem \ + -in ca.req \ + -out certificates/ca.pem &> /dev/null +fi + +# remove CA signing request +rm ca.req diff --git a/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/genclient.sh b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/genclient.sh new file mode 100755 index 000000000..b414b3629 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/genclient.sh @@ -0,0 +1,51 @@ +HOSTNAME=`hostname` + +EXT_FILE=$1 +EXT=$2 + +mkdir -p keys +mkdir -p certificates + +# create client key +openssl genrsa -out keys/client.pem 2048 &> /dev/null + +# create signing request for client +openssl req \ + -new \ + -key keys/client.pem \ + -out client.req \ + -nodes \ + -subj "/CN=$HOSTNAME/O=client" &> /dev/null + +# sign server request w/ CA key and gen x.509 cert. +if [[ ! 
-z "$EXT_FILE" ]]; +then + echo "using: $EXT_FILE" + openssl x509 \ + -req \ + -sha1 \ + -in client.req \ + -out certificates/client.pem \ + -CA certificates/ca.pem \ + -CAkey keys/ca.pem \ + -CAcreateserial \ + -set_serial $RANDOM \ + -extfile $EXT_FILE \ + -extensions $EXT \ + -days 3650 +else + openssl x509 \ + -req \ + -sha1 \ + -extensions usr_cert \ + -in client.req \ + -out certificates/client.pem \ + -CA certificates/ca.pem \ + -CAkey keys/ca.pem \ + -CAcreateserial \ + -set_serial $RANDOM \ + -days 3650 +fi + +# remove CA signing request +rm client.req diff --git a/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/gensrv.sh b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/gensrv.sh new file mode 100755 index 000000000..ae8681157 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/gensrv.sh @@ -0,0 +1,53 @@ +HOSTNAME=`hostname` + +EXT_FILE=$1 +EXT=$2 + +mkdir -p keys +mkdir -p certificates + +# create client key +openssl genrsa -out keys/server.pem 2048 &> /dev/null + +# create signing request +openssl req \ + -new \ + -key keys/server.pem \ + -out server.req \ + -nodes \ + -subj "/CN=$HOSTNAME/O=server" &> /dev/null + +# sign server request w/ CA key and gen x.509 cert. +if [[ ! 
-z "$EXT_FILE" ]]; +then + echo "using: $EXT_FILE" + openssl x509 \ + -req \ + -days 7035 \ + -extfile $EXT_FILE \ + -extensions $EXT \ + -in server.req \ + -out certificates/server.pem \ + -sha1 \ + -CA certificates/ca.pem \ + -CAkey keys/ca.pem \ + -CAcreateserial \ + -set_serial $RANDOM + -subj "/CN=$HOSTNAME" &> /dev/null +else + openssl x509 \ + -req \ + -days 7035 \ + -extensions usr_cert \ + -in server.req \ + -out certificates/server.pem \ + -sha1 \ + -CA certificates/ca.pem \ + -CAkey keys/ca.pem \ + -CAcreateserial \ + -set_serial $RANDOM + -subj "/CN=$HOSTNAME" &> /dev/null +fi + +# remove CA signing request +rm server.req diff --git a/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/keys/ca.pem b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/keys/ca.pem new file mode 100644 index 000000000..4b96a0577 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/keys/ca.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEA0Rq1axnWLAaZPEPTZ3EwjhyL4C7gXO+ZYsFYPTsfkYZNUXCe +qY04qE/oMezUSwxl6+RS9oYJHFmU9iGM0UtmvZzYWegNnmcFeVJbqyh1dRFsx/JP +XzucN4FdVgW6HhPZ40Jq2ea3e+B2L6lawXQts6Cm7sYkjayHkrWKbHIHvu0ns8VM +NkJKiGQr4D7ScMKcmoNQ1cPkIryJoFyIJufOnedEq4zYSaBeej1F6nL8djEiAHtm +h0/AO9g2MwZ++zSz2YkgqZwIqCQ9dU73L2BpGuvK0gJw86z5ihT883hO2DrDPFpa +8mDDji+xHzAz/SLgeVd8F2X1uv5yCfyO6QM6AQIDAQABAoIBADgAfzX/5WxFGCUb +xTKfDfIBkzSnWU+Km/nm7kr32qyO24ZEMzoVqGbrf7RfSTwQRfu3WsDLhQUiuNJy +idRD7XA64Xayv3L9BGpJtGNSoqKH2Cp0qu1GecAj1WuEfSQxDNngn91b/SggJYb+ +RGArTVIeZJbuWjvXSTfxhvEkWQ642P89yafx8JsfVa5XJatkTY8TMWSSxiWI/agW +9mdukMJ1nyot/cxF2Fizk+xCRaL5EWesQehJYkAY0gA0yB8OFv7YlAnVJ2AJ2vKr +dbdfYz62vNMbto14n4OYt7BGAgSAOmKFZkFOWNe5Gz7jl7NX7bg/MbhI8GvOMN6O +3tj8dkECgYEA9+vumWeap66aqzqUXPt4bw9m9EPNlUdFcGAXzkFnEdRl/m4ChdEq +OxMxZX8yKhgRuFhrIoAE0mF8MbXgmPVEpG6L8bMO4b7KE9kwDMZIL9EgZYzm3G/E +WAsHHBquSurZjusY2PqjiM5CTr8vQT/3FKgnsrCAyFoUueDc1RgJHZcCgYEA1+r6 +RfnkL5yljYQblOzcIg8vBhzAth3Ay4ZxjdBte24qrDkl3AFkkZJHM0P1LyuAbZzl 
+a6JQnAmSIJgeivSUBtQCRn9m3NwIUzMDfWNrYTidBXJ9N/giyEoxptdsK3Ihgyd8 +p3EFbJncTjBqGWBMN9yqn8ENMFfU/54K/otFCCcCgYEAwjJNoF42F1i+kJd/NWJJ +UP1IkNc3htHgA4lRvF8gpCeyiNyVH0rbAJElJTLAEZbu8m5X5RtQK5bu2GV1+3pL +fV2WQwoWAyAWW/vJFoiObN4ruT/oag0mSCzDMfF4Sviw17ExsmyJgpTF/bbmBf9b +xBN/nQmCpu1gxMTtVgHQyhsCgYAK+xtRVb0iIsBUqyRP9Z/YZ5dHo0y1tluoviSm +RrRas6ad7aYcQ0HfGWZfmBQvGvJEINeWGsIovV/eIn5jKL8ifcPD3BTaJKKTwKNr +zBOY9S0fKsxtfUQflY5i4s/QgdRsfCSdy2SwOj73BwVOVxRpPuK6jzHhhIYDngEH +IwQWvwKBgHmH48rS/LYejiZdAOnwpowcf75pOHcVB2oYYPqhpWYnUtR8o7Wzn9FJ +DnGeF+6iLpi2gvu+wRZlzKC3zCe+MognWZnuhCc2gbzzVV3ACjuyMMzwXGpDi/C0 +6bRaUWlJ3f9RdeNTGOIOW7zh+OV5hF0KdL5ELaAcPUyajzxjBKC3 +-----END RSA PRIVATE KEY----- diff --git a/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/keys/client.pem b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/keys/client.pem new file mode 100644 index 000000000..edc765591 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/keys/client.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEAsSVsebduY2OMKJlrSsRqr9ix4PWJ1NG9lQhAi4buSY6AMxT4 +2uVRwdDGsxvJuXNPjdrm6z3AQvinaBjHdjKq6xssyjosG1Q1tGDoJxaoefKe/eoi +rVW6FseNchs59vPT82HHMR+poYT5kVQ9uSsldU3p6rxiByU3qRe5oCHphg07/o+/ +pmD0/96+iKZ8E5BFT6/kOIltJd3zREpRRUbdm+8oc/X9BOEjIZiEZ0iBjiztcxNN +iCdYU7hpMfzGdQKkHVUnZuI3p0yDKfdUdZGMpSOusjHKVOsOseI/KryfJwWeK+A6 +jh3DqG8h6apWiY7/yuUhicVdpNBBZO2+PN5W6wIDAQABAoIBAE9ZYBm1XLEQajvV +XQYErXVPL36A/dhiivUy2Ramvh4cwrge8Mm8Zr7WcerQy69SQ5RbNYyyAGj/UJ2E +W/RKBd2CMrHzFMa5FcVSkyrPL3BQiX9bXWUfPVfu0yEFqRVBavNeHQeYEzfShKfg +gDVtaj39RIZtE8v3k9MeWXutNRliHQRcuLsVkozpGOjHfnitMzKIRvQiq0pAY8qe +gMtRv7z4XvTMaR7MQ5nZSJ+9yfBGhmsRNUbbbnK0b3ZmuXgmRXIHj9uK4io9IrQw +Ttr07ruQM1Wa9Ycqih7eKztVWogmRL5YG9IeoHwjRIR2fecfZN/cCdY2rL9l59L2 +vK2e/CECgYEA47NWPQIJz9JZKTFGNjbAbR3oCplbqtqpP1QX/ar81SBpiupT6C98 +Rv91Oai7/G2BsSz+hacbXL9SuxCeqEForZyPdGrIezqlQqfpkzGc5tPWvObrNLsF +dY+FqR0eJkQ1pqPv3/X1Yw4VTN+HeNJFpjiOLW5cGf6VXgu1mBNOOrUCgYEAxyme 
+NJzAOxjW+36mW47IjiRbCiyTp5nVP17tZ5MC85lqO8mlui6KaDObKQoMNXlD9fjs +1nWRmtU3ypHGEV/wWh+vvekiuIIB3hgON3zHzzfl8nnXP4dE63qKiuC4el9wc6vT +PkkX7NcDWokvgfY8ytcndtNgB5BwwLzj4H/mLx8CgYEAxlBwgKwgzY8ftp/Czf77 +s02Dxo9w5iqRk6VzS6y7jggvxBx4vdV1wQRHHdPW/nCFr0FH4DH9w9aigGhmNpbZ +S7P6wWMrfyBBxP8nkNp6FBRWh6n7Gq70f0JkIDlLA/sOWzeW+RFoCkC+zng0rkc9 +ABtrwSfIMZ+oigNawru1u60CgYEAjvE64C4E9iVs3xJWWe5P2V0UjmfSDf+mAxah +s63a93WLvp+Kt+cBZapsWXchrht8AIWJjp7RQ1x274fSrdO60SRe4SH7zotYxctk +IMUJL5vCCVcTe0Ad4hiQaP7CyOnI7uxK8DBL4Kutd4RwuYk+tqLXazxoduVfV6X6 +s714OycCgYATTRlJEKpMlLbWLEH55Vv+qgGoB4G2M4xHOP8uXyMhQvDs178A7SDF +wGHYYbcryHCZ6sWppQtwkA0rTmRg4VSwyRlyCGQxc4rf94TiU9XjmeB+gjobUX/N +Dz8Wv3GPUljjfHvxvjShXLLCBXFn5RN7j3M6mqhk+4daN7B/rqTG+g== +-----END RSA PRIVATE KEY----- diff --git a/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/keys/server.pem b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/keys/server.pem new file mode 100644 index 000000000..4ea0d3872 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/artifacts/x509/keys/server.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAyac34jRjgSnX2hE24W7tBfmPWvra2dGRyQszEv5QpT47Gs9D +KXPHqKLK8sZ1ZJQT7C/iNnS6efdWoaNeoipxhRzacz8IEm0BqTJJUo+dSbcHjKuG +rhMqmHQ6y2IcuxBJXSBUkHCdOpGZY6LJ8HCo22W3xRocveikbNlbi6NlwLyMNSDn +c3O366YpzLwZHNXmcB8hFWuzfslzvYRV44kt4ZVZTnPMUvvbytwpPD63xXMjtJlf +4nIbuJiLvSiDfjV+EmVzAa06cVgXRwE1/vhg0TSzk58mOd81pOSuNuPgKWQn85Lf +Q7qb8F6WG+iM55szRK3BPCcz2BYe36Q/WtxfwwIDAQABAoIBAQC4lEtezr1V+UsO +3Drz1mk20twZVw0D3AQplgrGE+sVlgTb4WOEienzvi8uUeIakkA6LQpO9kHKak/t +3HA+18LZKTP70XI+C3E9wj3DSJmcY+CRLmA07m5K5fj7OW86+wlsKK+/FpWUirlw +rg1R/I1TZaP00pYEbvIygDJ/eq0k4NERmP/GuU6fjzglYRaXxo4GBaj/jZcdfxSA +KQ7hJf3t13zHQKsfhPpH1cwq9s+9cglPuFKxqTobas96X6MXNyddLbs4G8XdJgwd +lZx7YV6Ma9hcu9MpPdysDFSfF9o/zpJJ0dWR6dXLyojqjA9GCSRpTWg1oX3XVFcw +Os1gsCehAoGBAOkE/hrSURRJOlHFlvM2g1HdpR1Uxapg5tJQcdFyUlHZinkpnU1E +70pPq3f+JmvF/9GD59SlS5kYYkwVTwWWwE/zgaAtXp1KXcl3Br2df0wZNQSs6FwE 
+2Yxy3vNtC8R2zv9mhKoSLK+HxmkTNc9JPDDJp3uQxPtUW/n9MW1Esf5fAoGBAN2K +U05/8kp9pPqKgtzngzjwAUPc0frMgLmielvXFbadUHWHkKcIPq2pEwHpmjSpdf1C +Uegxrf8ZHYkwQqeCQbhNMkakWC6IwWvJ/xq/Q+HgW/pQerze7u62tC7Zs0PmKjj0 +JCU7739OhupATvD03Qty8U27C/fIA+SlvSMNWdEdAoGAESQp4SU5i6l7XcqQOSH4 +dQw9+jNjOw5XzH7YcJTdU2ISWqliBe4dQDUuIuySPPlawTnX0v+52PA/QuOKZZna +WcdK8NKFlEKFNa3I1CgC1kFAcjCaAzLkIf3NQtDxvakTvxv2hGBD40G0qcQdADud +7uTWqjNTGj3xaili+mvEQDcCgYApdCOocap9o8sGEnrwKOXV9prV/vDPlAqWMb/h +hXqLpB4VD9vSVWmdByEGX5a6OedZD1rSLtGXBTUQoZYnT+lKOi+pJkFV28MNj0uL +zrQ+BMRRL1P4GWLuCgPnLFct4liONai5yuBCmwgFtyjOT5M4/DYIzHIt2CBaXWTp +tSrURQKBgCe9FxuXaWIJoI7fyDyMMURs1/Cj2t5NQWCEnfoHNlupW+vYmI/6qzfQ +l+jA/Zy7twHjLxiq0S63MH4IHIBgdd8yBLJ5uTYCK8tLRe0QiknvhfKJf68N6oM6 +nPPd2Q/ESykE92cmL9dnWg0+w6xYcHYWRKQKUqV1NDNltH6U6i/v +-----END RSA PRIVATE KEY----- diff --git a/pulp_file/tests/functional/api/from_pulpcore/constants.py b/pulp_file/tests/functional/api/from_pulpcore/constants.py new file mode 100644 index 000000000..56f7c95c5 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/constants.py @@ -0,0 +1,121 @@ +"""Constants for pulpcore API tests that require the use of a plugin.""" +import os +from urllib.parse import urljoin +from types import SimpleNamespace + +from pulp_smash import config +from pulp_smash.pulp3.constants import ( + BASE_DISTRIBUTION_PATH, + BASE_PATH, + BASE_PUBLICATION_PATH, + BASE_REMOTE_PATH, + BASE_REPO_PATH, + BASE_CONTENT_PATH, +) + +PULP_REPOSITORY_VERSION_BASE_URL = urljoin(BASE_PATH, "repository_versions/") + +PULP_PUBLICATION_BASE_URL = urljoin(BASE_PATH, "publications/") + +PULP_FIXTURES_BASE_URL = config.get_config().get_fixtures_url() + +PULP_CONTENT_HOST_BASE_URL = config.get_config().get_content_host_base_url() + +PULP_CONTENT_BASE_URL = urljoin(PULP_CONTENT_HOST_BASE_URL, "pulp/content/") + +FILE_CONTENT_NAME = "file.file" + +FILE_CONTENT_PATH = urljoin(BASE_CONTENT_PATH, "file/files/") + +FILE_REMOTE_PATH = urljoin(BASE_REMOTE_PATH, "file/file/") + +FILE_REPO_PATH = 
urljoin(BASE_REPO_PATH, "file/file/") + +FILE_DISTRIBUTION_PATH = urljoin(BASE_DISTRIBUTION_PATH, "file/file/") + +FILE_PUBLICATION_PATH = urljoin(BASE_PUBLICATION_PATH, "file/file/") + +FILE_CHUNKED_FIXTURE_URL = urljoin(PULP_FIXTURES_BASE_URL, "file-chunked/") + +FILE_TO_BE_CHUNKED_URL = urljoin(FILE_CHUNKED_FIXTURE_URL, "1.iso") + +FILE_CHUNKED_MANIFEST_URL = urljoin(FILE_CHUNKED_FIXTURE_URL, "PULP_MANIFEST") + +FILE_CHUNKED_PART_1_URL = urljoin(FILE_CHUNKED_FIXTURE_URL, "chunkaa") + +FILE_CHUNKED_PART_2_URL = urljoin(FILE_CHUNKED_FIXTURE_URL, "chunkab") + +FILE_FIXTURE_URL = urljoin(PULP_FIXTURES_BASE_URL, "file/") +"""The URL to a file repository.""" + +FILE_FIXTURE_WITH_MISSING_FILES_URL = urljoin(PULP_FIXTURES_BASE_URL, "file-manifest/") +"""The URL to a file repository with missing files.""" + +FILE_FIXTURE_WITH_MISSING_FILES_MANIFEST_URL = urljoin( + FILE_FIXTURE_WITH_MISSING_FILES_URL, "PULP_MANIFEST" +) +"""The URL to a file repository with missing files manifest.""" + +FILE_CHUNKED_FIXTURE_URL = urljoin(PULP_FIXTURES_BASE_URL, "file-chunked/") +"""The URL to a file repository.""" + +FILE_CHUNKED_FIXTURE_MANIFEST_URL = urljoin(FILE_CHUNKED_FIXTURE_URL, "PULP_MANIFEST") +"""The URL to a file repository manifest""" + +FILE_FIXTURE_MANIFEST_URL = urljoin(FILE_FIXTURE_URL, "PULP_MANIFEST") +"""The URL to a file repository manifest.""" + +FILE_FIXTURE_COUNT = 3 +"""The number of packages available at :data:`FILE_FIXTURE_URL`.""" + +FILE_FIXTURE_SUMMARY = {FILE_CONTENT_NAME: FILE_FIXTURE_COUNT} +"""The desired content summary after syncing :data:`FILE_FIXTURE_URL`.""" + +FILE2_FIXTURE_URL = urljoin(PULP_FIXTURES_BASE_URL, "file2/") +"""The URL to a file repository.""" + +FILE2_FIXTURE_MANIFEST_URL = urljoin(FILE2_FIXTURE_URL, "PULP_MANIFEST") +"""The URL to a file repository manifest""" + +FILE_MANY_FIXTURE_URL = urljoin(PULP_FIXTURES_BASE_URL, "file-many/") +"""The URL to a file repository containing many files.""" + +FILE_MANY_FIXTURE_MANIFEST_URL = 
urljoin(FILE_MANY_FIXTURE_URL, "PULP_MANIFEST") +"""The URL to a file repository manifest""" + +FILE_MANY_FIXTURE_COUNT = 250 +"""The number of packages available at :data:`FILE_MANY_FIXTURE_URL`.""" + +FILE_LARGE_FIXTURE_URL = urljoin(PULP_FIXTURES_BASE_URL, "file-large/") +"""The URL to a file repository containing a large number of files.""" + +FILE_LARGE_URL = urljoin(FILE_LARGE_FIXTURE_URL, "1.iso") +"""The URL to a large ISO file at :data:`FILE_LARGE_FIXTURE_URL`.""" + +FILE_LARGE_FIXTURE_COUNT = 10 +"""The number of packages available at :data:`FILE_LARGE_FIXTURE_URL`.""" + +FILE_LARGE_FIXTURE_MANIFEST_URL = urljoin(FILE_LARGE_FIXTURE_URL, "PULP_MANIFEST") +"""The URL to a file repository manifest.""" + +FILE_URL = urljoin(FILE_FIXTURE_URL, "1.iso") +"""The URL to an ISO file at :data:`FILE_FIXTURE_URL`.""" + +FILE2_URL = urljoin(FILE2_FIXTURE_URL, "1.iso") +"""The URL to an ISO file at :data:`FILE2_FIXTURE_URL`.""" + +_CURRENT_DIR = os.path.dirname(os.path.realpath(__file__)) + +X509_CERTS_BASE_PATH = os.path.join(_CURRENT_DIR, "artifacts", "x509", "certificates") +X509_CA_CERT_FILE_PATH = os.path.join(X509_CERTS_BASE_PATH, "ca.pem") + +#: All valid task states. 
+TASK_STATES = SimpleNamespace( + WAITING="waiting", + SKIPPED="skipped", + RUNNING="running", + COMPLETED="completed", + FAILED="failed", + CANCELED="canceled", + CANCELING="canceling", +) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_acs.py b/pulp_file/tests/functional/api/from_pulpcore/test_acs.py new file mode 100644 index 000000000..445b95b74 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_acs.py @@ -0,0 +1,107 @@ +import unittest + +from pulp_smash import config +from pulp_smash.pulp3.bindings import delete_orphans, monitor_task + +from pulpcore.client.pulp_file import AcsFileApi, RemotesFileApi +from pulpcore.client.pulp_file.exceptions import ApiException + +from pulp_file.tests.functional.utils import ( + gen_file_client, + gen_file_remote, +) + + +class AlternateContentSourceTestCase(unittest.TestCase): + @classmethod + def setUpClass(cls): + """ + Create class-wide variables. + + Variables 'paths' and 'paths_updated' are defined as strings. + In same way data are send from user. + """ + cls.cfg = config.get_config() + cls.file_client = gen_file_client() + cls.file_remote_api = RemotesFileApi(cls.file_client) + cls.file_acs_api = AcsFileApi(cls.file_client) + cls.paths = ["backupone/PULP_MANIFEST", "backuptwo/manifest"] + cls.paths_updated = ["backupone/test", "anotherbackup/PULP_MANIFEST"] + + @classmethod + def tearDownClass(cls): + delete_orphans() + + def test_create(self): + """ + Basic ACS create. + + 1. Try and fail to create ACS with remote with immediate policy + 2. 
Create ACS and check it exists + """ + remote_bad = self.file_remote_api.create(gen_file_remote()) + remote = self.file_remote_api.create(gen_file_remote(policy="on_demand")) + self.addCleanup(self.file_remote_api.delete, remote_bad.pulp_href) + self.addCleanup(self.file_remote_api.delete, remote.pulp_href) + + acs_data = { + "name": "alternatecontentsource", + "remote": remote_bad.pulp_href, + "paths": self.paths, + } + with self.assertRaises(ApiException) as ctx: + self.file_acs_api.create(acs_data) + self.assertEqual(ctx.exception.status, 400) + + acs_data["remote"] = remote.pulp_href + + acs = self.file_acs_api.create(acs_data) + self.addCleanup(self.file_acs_api.delete, acs.pulp_href) + + self.assertEqual(len(self.file_acs_api.list(name="alternatecontentsource").results), 1) + + def test_acs_update(self): + """ + ACS update. + + Test of update name and paths. + """ + remote = self.file_remote_api.create(gen_file_remote(policy="on_demand")) + self.addCleanup(self.file_remote_api.delete, remote.pulp_href) + + acs_data = { + "name": "alternatecontentsource", + "remote": remote.pulp_href, + "paths": self.paths, + } + acs = self.file_acs_api.create(acs_data) + self.addCleanup(self.file_acs_api.delete, acs.pulp_href) + + # update name + new_name = "acs" + response = self.file_acs_api.update(acs.pulp_href, {"name": new_name, "remote": acs.remote}) + monitor_task(response.task) + acs = self.file_acs_api.read(acs.pulp_href) + + self.assertEqual(acs.name, new_name) + # assert paths were not silently removed during name update + self.assertEqual(sorted(acs.paths), sorted(self.paths)) + + # partial update name + new_name = "new_acs" + response = self.file_acs_api.partial_update( + acs.pulp_href, {"name": new_name, "remote": acs.remote} + ) + monitor_task(response.task) + acs = self.file_acs_api.read(acs.pulp_href) + + self.assertEqual(acs.name, new_name) + + # update paths + response = self.file_acs_api.update( + acs.pulp_href, {"name": acs.name, "remote": acs.remote, 
"paths": self.paths_updated} + ) + monitor_task(response.task) + acs = self.file_acs_api.read(acs.pulp_href) + + self.assertEqual(sorted(acs.paths), sorted(self.paths_updated)) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_content_cache.py b/pulp_file/tests/functional/api/from_pulpcore/test_content_cache.py new file mode 100644 index 000000000..beda80d21 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_content_cache.py @@ -0,0 +1,171 @@ +"""Tests related to content cache.""" +import requests +import unittest +from urllib.parse import urljoin + +from pulp_smash.pulp3.bindings import monitor_task +from pulp_smash.pulp3.utils import gen_distribution, gen_repo + +from pulpcore.client.pulp_file import ( + ContentFilesApi, + RepositoryAddRemoveContent, + RepositorySyncURL, + RepositoriesFileApi, + RemotesFileApi, + PublicationsFileApi, + FileFilePublication, + DistributionsFileApi, + PatchedfileFileDistribution, +) + +from pulp_file.tests.functional.utils import gen_file_client, gen_file_remote, get_redis_status +from .constants import PULP_CONTENT_BASE_URL + +is_redis_connected = get_redis_status() + + +@unittest.skipUnless(is_redis_connected, "Could not connect to the Redis server") +class ContentCacheTestCache(unittest.TestCase): + """Test content cache""" + + @classmethod + def setUpClass(cls): + """Sets up class""" + client = gen_file_client() + cls.cont_api = ContentFilesApi(client) + cls.repo_api = RepositoriesFileApi(client) + cls.remote_api = RemotesFileApi(client) + cls.pub_api = PublicationsFileApi(client) + cls.dis_api = DistributionsFileApi(client) + + def setUp(self): + self.repo = self.repo_api.create(gen_repo(autopublish=True)) + self.remote = self.remote_api.create(gen_file_remote()) + + body = RepositorySyncURL(remote=self.remote.pulp_href) + created = monitor_task(self.repo_api.sync(self.repo.pulp_href, body).task).created_resources + self.repo = self.repo_api.read(self.repo.pulp_href) + self.pub1 = 
self.pub_api.read(created[1]) + body = FileFilePublication(repository=self.repo.pulp_href) + self.pub2 = self.pub_api.read( + monitor_task(self.pub_api.create(body).task).created_resources[0] + ) + self.pub3 = [] + response = self.dis_api.create(gen_distribution(repository=self.repo.pulp_href)) + self.distro = self.dis_api.read(monitor_task(response.task).created_resources[0]) + self.distro2 = [] + self.url = urljoin(PULP_CONTENT_BASE_URL, f"{self.distro.base_path}/") + + def tearDown(self): + a = self.remote_api.delete(self.remote.pulp_href).task + b = self.dis_api.delete(self.distro.pulp_href).task + for task_href in [a, b]: + monitor_task(task_href) + + def test_content_cache_workflow(self): + self._basic_cache_access() + self._remove_repository_invalidates() + self._restore_repository() + self._multiple_distributions() + self._invalidate_multiple_distributions() + self._delete_distribution_invalidates_one() + self._delete_extra_pub_doesnt_invalidate() + self._delete_served_pub_does_invalidate() + self._delete_repo_invalidates() + self._no_error_when_accessing_invalid_file() + + def _basic_cache_access(self): + """Checks responses are cached for content""" + files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "1.iso", "1.iso"] + for i, file in enumerate(files): + self.assertEqual((200, "HIT" if i % 2 == 1 else "MISS"), self._check_cache(file), file) + + def _remove_repository_invalidates(self): + """Checks removing repository from distribution invalidates the cache""" + body = PatchedfileFileDistribution(repository="") + monitor_task(self.dis_api.partial_update(self.distro.pulp_href, body).task) + files = ["", "PULP_MANIFEST", "1.iso"] + for file in files: + self.assertEqual((404, None), self._check_cache(file), file) + + def _restore_repository(self): + """Checks that responses are cacheable after repository is added back""" + body = PatchedfileFileDistribution(repository=self.repo.pulp_href) + monitor_task(self.dis_api.partial_update(self.distro.pulp_href, 
body).task) + files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "1.iso", "1.iso"] + for i, file in enumerate(files): + self.assertEqual((200, "HIT" if i % 2 == 1 else "MISS"), self._check_cache(file), file) + + def _multiple_distributions(self): + """Add a new distribution and check that its responses are cached separately""" + response = self.dis_api.create(gen_distribution(repository=self.repo.pulp_href)) + self.distro2.append(self.dis_api.read(monitor_task(response.task).created_resources[0])) + url = urljoin(PULP_CONTENT_BASE_URL, f"{self.distro2[0].base_path}/") + files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "1.iso", "1.iso"] + for i, file in enumerate(files): + self.assertEqual( + (200, "HIT" if i % 2 == 1 else "MISS"), self._check_cache(file, url), file + ) + + def _invalidate_multiple_distributions(self): + """Test that updating a repository pointed by multiple distributions invalidates all""" + url = urljoin(PULP_CONTENT_BASE_URL, f"{self.distro2[0].base_path}/") + cfile = self.cont_api.list( + relative_path="1.iso", repository_version=self.repo.latest_version_href + ).results[0] + body = RepositoryAddRemoveContent(remove_content_units=[cfile.pulp_href]) + response = monitor_task(self.repo_api.modify(self.repo.pulp_href, body).task) + self.pub3.append(self.pub_api.read(response.created_resources[1])) + files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "2.iso", "2.iso"] + for i, file in enumerate(files): + self.assertEqual((200, "HIT" if i % 2 == 1 else "MISS"), self._check_cache(file), file) + self.assertEqual( + (200, "HIT" if i % 2 == 1 else "MISS"), self._check_cache(file, url), file + ) + + def _delete_distribution_invalidates_one(self): + """Tests that deleting one distribution sharing a repository only invalidates its cache""" + url = urljoin(PULP_CONTENT_BASE_URL, f"{self.distro2[0].base_path}/") + monitor_task(self.dis_api.delete(self.distro2[0].pulp_href).task) + files = ["", "PULP_MANIFEST", "2.iso"] + for file in files: + 
self.assertEqual((200, "HIT"), self._check_cache(file), file) + self.assertEqual((404, None), self._check_cache(file, url), file) + + def _delete_extra_pub_doesnt_invalidate(self): + """Test that deleting a publication not being served doesn't invalidate cache""" + self.pub_api.delete(self.pub2.pulp_href) + files = ["", "PULP_MANIFEST", "2.iso"] + for file in files: + self.assertEqual((200, "HIT"), self._check_cache(file), file) + + def _delete_served_pub_does_invalidate(self): + """Test that deleting the serving publication does invalidate the cache""" + # Reverts back to serving self.pub1 + self.pub_api.delete(self.pub3[0].pulp_href) + files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "2.iso", "2.iso"] + for i, file in enumerate(files): + self.assertEqual((200, "HIT" if i % 2 == 1 else "MISS"), self._check_cache(file), file) + + def _delete_repo_invalidates(self): + """Tests that deleting a repository invalidates the cache""" + monitor_task(self.repo_api.delete(self.repo.pulp_href).task) + files = ["", "PULP_MANIFEST", "2.iso"] + for file in files: + self.assertEqual((404, None), self._check_cache(file), file) + + def _no_error_when_accessing_invalid_file(self): + """Tests that accessing a file that doesn't exist on content app gives 404""" + files = ["invalid", "another/bad-one", "DNE/"] + url = PULP_CONTENT_BASE_URL + for file in files: + self.assertEqual((404, None), self._check_cache(file, url=url), file) + + def _check_cache(self, file, url=None): + """Helper to check if cache miss or hit""" + url = urljoin(url or self.url, file) + r = requests.get(url) + if r.history: + r = r.history[0] + return 200 if r.status_code == 302 else r.status_code, r.headers.get("X-PULP-CACHE") + return r.status_code, r.headers.get("X-PULP-CACHE") diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_content_delivery.py b/pulp_file/tests/functional/api/from_pulpcore/test_content_delivery.py new file mode 100644 index 000000000..94a27cb2e --- /dev/null +++ 
b/pulp_file/tests/functional/api/from_pulpcore/test_content_delivery.py @@ -0,0 +1,181 @@ +"""Tests related to content delivery.""" +import hashlib +import unittest +from random import choice +from urllib.parse import urljoin + +from pulp_smash import api, config, utils +from pulp_smash.pulp3.bindings import delete_orphans, monitor_task, PulpTestCase +from pulp_smash.pulp3.constants import ON_DEMAND_DOWNLOAD_POLICIES +from pulp_smash.pulp3.utils import ( + download_content_unit, + gen_distribution, + gen_repo, + get_content, + sync, +) +from requests import HTTPError + +from pulpcore.client.pulp_file import ( + PublicationsFileApi, + RemotesFileApi, + RepositoriesFileApi, + RepositorySyncURL, + DistributionsFileApi, +) + +from pulp_file.tests.functional.utils import ( + create_file_publication, + gen_file_remote, + gen_file_client, +) +from .constants import ( + FILE_CONTENT_NAME, + FILE_DISTRIBUTION_PATH, + FILE_FIXTURE_URL, + FILE_FIXTURE_MANIFEST_URL, + FILE_FIXTURE_WITH_MISSING_FILES_MANIFEST_URL, + FILE_REMOTE_PATH, + FILE_REPO_PATH, +) + + +class ContentDeliveryTestCase(unittest.TestCase): + """Content delivery breaks when delete remote - lazy download policy. + + Deleting a remote that was used in a sync with either the on_demand or + streamed options can break published data. Specifically, clients who want + to fetch content that a remote was providing access to would begin to + 404. Recreating a remote and re-triggering a sync will cause these broken + units to recover again. + + This test targets the following issue: + + * `Pulp #4464 `_ + """ + + def test_content_remote_delete(self): + """Assert that an HTTP error is raised when remote is deleted. + + Also verify that the content can be downloaded from Pulp once the + remote is recreated and another sync is triggered. 
+ """ + cfg = config.get_config() + delete_orphans() + client = api.Client(cfg, api.page_handler) + + repo = client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(client.delete, repo["pulp_href"]) + + body = gen_file_remote(policy=choice(ON_DEMAND_DOWNLOAD_POLICIES)) + remote = client.post(FILE_REMOTE_PATH, body) + + # Sync the repository using a lazy download policy. + sync(cfg, remote, repo) + repo = client.get(repo["pulp_href"]) + + publication = create_file_publication(cfg, repo) + self.addCleanup(client.delete, publication["pulp_href"]) + + # Delete the remote. + client.delete(remote["pulp_href"]) + + body = gen_distribution() + body["publication"] = publication["pulp_href"] + distribution = client.using_handler(api.task_handler).post(FILE_DISTRIBUTION_PATH, body) + self.addCleanup(client.delete, distribution["pulp_href"]) + + unit_path = choice( + [content_unit["relative_path"] for content_unit in get_content(repo)[FILE_CONTENT_NAME]] + ) + + # Assert that an HTTP error is raised when one to fetch content from + # the distribution once the remote was removed. + with self.assertRaises(HTTPError) as ctx: + download_content_unit(cfg, distribution, unit_path) + for key in ("not", "found"): + self.assertIn(key, ctx.exception.response.reason.lower()) + + # Recreating a remote and re-triggering a sync will cause these broken + # units to recover again. 
+ body = gen_file_remote(policy=choice(ON_DEMAND_DOWNLOAD_POLICIES)) + remote = client.post(FILE_REMOTE_PATH, body) + self.addCleanup(client.delete, remote["pulp_href"]) + + sync(cfg, remote, repo) + + content = download_content_unit(cfg, distribution, unit_path) + pulp_hash = hashlib.sha256(content).hexdigest() + + fixtures_hash = hashlib.sha256( + utils.http_get(urljoin(FILE_FIXTURE_URL, unit_path)) + ).hexdigest() + + self.assertEqual(pulp_hash, fixtures_hash) + + +class RemoteArtifactUpdateTestCase(PulpTestCase): + @classmethod + def setUpClass(cls): + """Clean out Pulp before testing.""" + delete_orphans() + client = gen_file_client() + cls.repo_api = RepositoriesFileApi(client) + cls.remote_api = RemotesFileApi(client) + cls.publication_api = PublicationsFileApi(client) + cls.distributions_api = DistributionsFileApi(client) + cls.cfg = config.get_config() + + def tearDown(self): + """Clean up Pulp after testing.""" + self.doCleanups() + delete_orphans() + + def test_remote_artifact_url_update(self): + """Test that downloading on_demand content works after a repository layout change.""" + + FILE_NAME = "1.iso" + + # 1. Create a remote, repository and distribution - remote URL has links that should 404 + remote_config = gen_file_remote( + policy="on_demand", url=FILE_FIXTURE_WITH_MISSING_FILES_MANIFEST_URL + ) + remote = self.remote_api.create(remote_config) + self.addCleanup(self.remote_api.delete, remote.pulp_href) + + repo = self.repo_api.create(gen_repo(autopublish=True, remote=remote.pulp_href)) + self.addCleanup(self.repo_api.delete, repo.pulp_href) + + body = gen_distribution(repository=repo.pulp_href) + distribution_response = self.distributions_api.create(body) + created_resources = monitor_task(distribution_response.task).created_resources + distribution = self.distributions_api.read(created_resources[0]) + self.addCleanup(self.distributions_api.delete, distribution.pulp_href) + + # 2. 
Sync the repository, verify that downloading artifacts fails + repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) + + sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data) + monitor_task(sync_response.task) + + with self.assertRaises(HTTPError): + download_content_unit(self.cfg, distribution.to_dict(), FILE_NAME) + + # 3. Update the remote URL with one that works, sync again, check that downloading + # artifacts works. + update_response = self.remote_api.update( + remote.pulp_href, gen_file_remote(policy="on_demand", url=FILE_FIXTURE_MANIFEST_URL) + ) + monitor_task(update_response.task) + + sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data) + monitor_task(sync_response.task) + + content = download_content_unit(self.cfg, distribution.to_dict(), FILE_NAME) + pulp_hash = hashlib.sha256(content).hexdigest() + + fixtures_hash = hashlib.sha256( + utils.http_get(urljoin(FILE_FIXTURE_URL, FILE_NAME)) + ).hexdigest() + + self.assertEqual(pulp_hash, fixtures_hash) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_content_guard.py b/pulp_file/tests/functional/api/from_pulpcore/test_content_guard.py new file mode 100644 index 000000000..21d281bc9 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_content_guard.py @@ -0,0 +1,139 @@ +import requests +import unittest + +from urllib.parse import urljoin + +from pulp_smash import config, utils +from pulp_smash.pulp3.bindings import monitor_task +from pulp_smash.pulp3.utils import gen_distribution + +from pulpcore.client.pulpcore import ( + ApiClient as CoreApiClient, + GroupsApi, + GroupsUsersApi, + ContentguardsRbacApi, +) +from pulpcore.client.pulp_file import ( + DistributionsFileApi, + PatchedfileFileDistribution, +) + +from pulp_file.tests.functional.utils import ( + gen_file_client, + gen_user_rest, + del_user_rest, +) +from .constants import PULP_CONTENT_BASE_URL + + +class RBACContentGuardTestCase(unittest.TestCase): + """Test RBAC 
enabled content guard""" + + CREATOR_ROLE = "core.rbaccontentguard_creator" + DOWNLOAD_ROLE = "core.rbaccontentguard_downloader" + + @classmethod + def setUpClass(cls): + cls.client = gen_file_client() # This is admin client, following apis are for admin user + cls.api_config = config.get_config().get_bindings_config() + core_client = CoreApiClient(config.get_config().get_bindings_config()) + cls.groups_api = GroupsApi(core_client) + cls.group_users_api = GroupsUsersApi(core_client) + cls.distro_api = DistributionsFileApi(cls.client) + + def setUp(self): + response = monitor_task(self.distro_api.create(gen_distribution()).task) + self.distro = self.distro_api.read(response.created_resources[0]) + self.rbac_guard_api = ContentguardsRbacApi(self.client) + + self.admin = { + "username": self.client.configuration.username, + "password": self.client.configuration.password, + } + user = gen_user_rest(model_roles=["core.rbaccontentguard_creator"]) + self.api_config.username = user["username"] + self.api_config.password = user["password"] + user["rbac_guard_api"] = ContentguardsRbacApi(CoreApiClient(self.api_config)) + self.creator_user = user + self.user_a = gen_user_rest() + self.user_b = gen_user_rest() + self.all_users = [self.creator_user, self.user_a, self.user_a, self.admin, None] + + self.group = self.groups_api.create({"name": utils.uuid4()}) + self.group_users_api.create(self.group.pulp_href, {"username": self.user_b["username"]}) + self.group_users_api.create(self.group.pulp_href, {"username": self.user_a["username"]}) + + self.url = urljoin(PULP_CONTENT_BASE_URL, f"{self.distro.base_path}/") + + def tearDown(self): + self.distro_api.delete(self.distro.pulp_href) + self.rbac_guard_api.delete(self.distro.content_guard) + self.groups_api.delete(self.group.pulp_href) + del_user_rest(self.creator_user["pulp_href"]) + del_user_rest(self.user_a["pulp_href"]) + del_user_rest(self.user_b["pulp_href"]) + + def test_workflow(self): + self._all_users_access() + 
self._content_guard_creation() + self._only_creator_access() + self._add_users() + self._remove_users() + self._add_group() + self._remove_group() + + def _all_users_access(self): + """Sanity check that all users can access distribution with no content guard""" + self._assert_access(self.all_users) + + def _content_guard_creation(self): + """Checks that RBAC ContentGuard can be created and assigned to a distribution""" + guard = self.creator_user["rbac_guard_api"].create({"name": self.distro.name}) + body = PatchedfileFileDistribution(content_guard=guard.pulp_href) + monitor_task(self.distro_api.partial_update(self.distro.pulp_href, body).task) + self.distro = self.distro_api.read(self.distro.pulp_href) + self.assertEqual(guard.pulp_href, self.distro.content_guard) + + def _only_creator_access(self): + """Checks that now only the creator and admin user can access the distribution""" + self._assert_access([self.creator_user, self.admin]) + + def _add_users(self): + """Use the /add/ endpoint to give the users permission to access distribution""" + body = { + "users": (self.user_a["username"], self.user_b["username"]), + "role": self.DOWNLOAD_ROLE, + } + self.creator_user["rbac_guard_api"].add_role(self.distro.content_guard, body) + self._assert_access([self.creator_user, self.user_b, self.user_a, self.admin]) + + def _remove_users(self): + """Use the /remove/ endpoint to remove users permission to access distribution""" + body = { + "users": (self.user_a["username"], self.user_b["username"]), + "role": self.DOWNLOAD_ROLE, + } + self.creator_user["rbac_guard_api"].remove_role(self.distro.content_guard, body) + self._assert_access([self.creator_user, self.admin]) + + def _add_group(self): + """Use the /add/ endpoint to add group""" + body = {"groups": [self.group.name], "role": self.DOWNLOAD_ROLE} + self.creator_user["rbac_guard_api"].add_role(self.distro.content_guard, body) + self._assert_access([self.creator_user, self.user_b, self.user_a, self.admin]) + + def 
_remove_group(self): + """Use the /remove/ endpoint to remove group""" + body = {"groups": [self.group.name], "role": self.DOWNLOAD_ROLE} + self.creator_user["rbac_guard_api"].remove_role(self.distro.content_guard, body) + self._assert_access([self.creator_user, self.admin]) + + def _assert_access(self, auth_users): + """Helper for asserting functionality and correct permissions on the content guard""" + for user in self.all_users: + auth = (user["username"], user["password"]) if user else None + r = requests.session() + r.trust_env = False # Don't read the .netrc file + response = r.get(self.url, auth=auth) + expected_status = 404 if user in auth_users else 403 + self.assertEqual(response.status_code, expected_status, f"Failed on {user=}") diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_content_path.py b/pulp_file/tests/functional/api/from_pulpcore/test_content_path.py new file mode 100644 index 000000000..11535c94b --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_content_path.py @@ -0,0 +1,57 @@ +"""Tests related to content path.""" +import unittest + +from pulp_smash import api, config +from pulp_smash.pulp3.bindings import delete_orphans +from pulp_smash.pulp3.utils import gen_remote, gen_repo, sync + +from pulp_file.tests.functional.utils import create_file_publication +from .constants import ( + FILE_FIXTURE_MANIFEST_URL, + FILE_REMOTE_PATH, + FILE_REPO_PATH, +) + + +class SyncPublishContentPathTestCase(unittest.TestCase): + """Test whether sync/publish for content already in Pulp. + + Different code paths are used in Pulp for the cases when artifacts are + already present on the filesystem during sync and when they are not + downloaded yet + + This test targets the following issue: + + `Pulp #4442 `_ + + Does the following: + + 1. Assure that no content from repository A is downloaded. + 2. Sync/publish repository A with download policy immediate. + 3. Sync/publish repository A again with download policy immediate. + 4. 
No failure in 2 shows that sync went fine when content was + not present on the disk and in the database. + 5. No failure in 3 shows that sync went fine when content was already + present on the disk and in the database. + + """ + + def test_all(self): + """Test whether sync/publish for content already in Pulp.""" + cfg = config.get_config() + client = api.Client(cfg, api.page_handler) + + # step 1. delete orphans to assure that no content is present on disk, + # or database. + delete_orphans() + + remote = client.post(FILE_REMOTE_PATH, gen_remote(FILE_FIXTURE_MANIFEST_URL)) + self.addCleanup(client.delete, remote["pulp_href"]) + + repo = client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(client.delete, repo["pulp_href"]) + + for _ in range(2): + sync(cfg, remote, repo) + repo = client.get(repo["pulp_href"]) + create_file_publication(cfg, repo) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_content_promotion.py b/pulp_file/tests/functional/api/from_pulpcore/test_content_promotion.py new file mode 100644 index 000000000..42ea27c0d --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_content_promotion.py @@ -0,0 +1,101 @@ +"""Tests related to content promotion.""" +import hashlib +import unittest +from urllib.parse import urljoin + +from pulp_smash import api, config +from pulp_smash.pulp3.utils import gen_distribution, gen_remote, gen_repo, get_added_content, sync + +from pulp_file.tests.functional.utils import create_file_publication +from .constants import ( + FILE_CONTENT_NAME, + FILE_DISTRIBUTION_PATH, + FILE_FIXTURE_MANIFEST_URL, + FILE_REMOTE_PATH, + FILE_REPO_PATH, + PULP_CONTENT_BASE_URL, +) + + +class ContentPromotionTestCase(unittest.TestCase): + """Test content promotion.""" + + def test_all(self): + """Test content promotion for a distribution. + + This test targets the following issue: + + * `Pulp #4186 `_ + * `Pulp #8475 `_ + * `Pulp #8760 `_ + + Do the following: + + 1. 
Create a repository that has at least one repository version. + 2. Create a publication. + 3. Create 2 distributions - using the same publication. Those + distributions will have different ``base_path``. + 4. Assert that distributions have the same publication. + 5. Create another distribution using same repository version. + 5. Assert that distributions are viewable from base url + 6. Assert that content in distributions are viewable + 7. Select a content unit. Download that content unit from Pulp using + the three different distributions. + Assert that content unit has the same checksum when fetched from + different distributions. + """ + cfg = config.get_config() + client = api.Client(cfg, api.json_handler) + + repo = client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(client.delete, repo["pulp_href"]) + + remote = client.post(FILE_REMOTE_PATH, gen_remote(FILE_FIXTURE_MANIFEST_URL)) + self.addCleanup(client.delete, remote["pulp_href"]) + + sync(cfg, remote, repo) + repo = client.get(repo["pulp_href"]) + + publication = create_file_publication(cfg, repo) + self.addCleanup(client.delete, publication["pulp_href"]) + + distributions = [] + for _ in range(2): + body = gen_distribution() + body["publication"] = publication["pulp_href"] + distribution = client.using_handler(api.task_handler).post(FILE_DISTRIBUTION_PATH, body) + distributions.append(distribution) + self.addCleanup(client.delete, distribution["pulp_href"]) + + self.assertEqual( + distributions[0]["publication"], distributions[1]["publication"], distributions + ) + + body = gen_distribution() + body["repository"] = repo["pulp_href"] + distribution = client.using_handler(api.task_handler).post(FILE_DISTRIBUTION_PATH, body) + distributions.append(distribution) + self.addCleanup(client.delete, distribution["pulp_href"]) + + client.response_handler = api.safe_handler + self.assertEqual(client.get(PULP_CONTENT_BASE_URL).status_code, 200) + + for distribution in distributions: + 
self.assertEqual(client.get(distribution["base_url"]).status_code, 200) + + unit_urls = [] + unit_path = get_added_content(repo)[FILE_CONTENT_NAME][0]["relative_path"] + for distribution in distributions: + unit_url = distribution["base_url"] + unit_urls.append(urljoin(unit_url, unit_path)) + + self.assertEqual( + hashlib.sha256(client.get(unit_urls[0]).content).hexdigest(), + hashlib.sha256(client.get(unit_urls[1]).content).hexdigest(), + unit_urls, + ) + self.assertEqual( + hashlib.sha256(client.get(unit_urls[0]).content).hexdigest(), + hashlib.sha256(client.get(unit_urls[2]).content).hexdigest(), + unit_urls, + ) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_crd_publications.py b/pulp_file/tests/functional/api/from_pulpcore/test_crd_publications.py new file mode 100644 index 000000000..f0608b685 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_crd_publications.py @@ -0,0 +1,213 @@ +"""Tests that perform actions over publications.""" +import unittest +from itertools import permutations + +from pulp_smash import api, config +from pulp_smash.pulp3.utils import gen_repo, get_content, modify_repo, sync +from requests.exceptions import HTTPError + +from pulp_file.tests.functional.utils import ( + create_file_publication, + gen_file_remote, + parse_date_from_string, +) +from .constants import ( + FILE_CONTENT_NAME, + FILE_PUBLICATION_PATH, + FILE_REMOTE_PATH, + FILE_REPO_PATH, +) + + +class PublicationsTestCase(unittest.TestCase): + """Perform actions over publications.""" + + def setUp(self): + """Create class-wide variables.""" + self.cfg = config.get_config() + self.client = api.Client(self.cfg, api.page_handler) + self.client_echo = api.Client(self.cfg, api.echo_handler) + self.remote = {} + self.publication = {} + self.repo = {} + try: + self.repo.update(self.client.post(FILE_REPO_PATH, gen_repo())) + self.repo_initial_version = self.repo["latest_version_href"] + body = gen_file_remote() + 
self.remote.update(self.client.post(FILE_REMOTE_PATH, body)) + sync(self.cfg, self.remote, self.repo) + # update to get latest_version_href + self.repo.update(self.client.get(self.repo["pulp_href"])) + except Exception: + self.tearDown() + raise + + def tearDown(self): + """Clean class-wide variables.""" + for resource in (self.remote, self.repo): + if resource: + self.client.delete(resource["pulp_href"]) + + def test_workflow(self): + self._create_file_publication() + self._read_publication() + self._read_publication_with_specific_fields() + self._read_publication_without_specific_fields() + self._read_publications_filter_repo_version() + self._read_publications_filter_repo_version_no_match() + self._read_publications_filter_repo_version_invalid() + self._read_publications_filter_created_time() + self._read_publications_filter_created_time_no_match() + self._publication_create_order() + self._delete() + + def _create_file_publication(self): + """Create a publication.""" + self.publication.update(create_file_publication(self.cfg, self.repo)) + + def _read_publication(self): + """Read a publication by its href.""" + publication = self.client.get(self.publication["pulp_href"]) + for key, val in self.publication.items(): + with self.subTest(key=key): + self.assertEqual(publication[key], val) + + def _read_publication_with_specific_fields(self): + """Read a publication by its href providing specific field list. + + Permutate field list to ensure different combinations on result. 
+ """ + fields = ("pulp_href", "pulp_created", "distributions") + for field_pair in permutations(fields, 2): + # ex: field_pair = ('pulp_href', 'pulp_created) + with self.subTest(field_pair=field_pair): + publication = self.client.get( + self.publication["pulp_href"], params={"fields": ",".join(field_pair)} + ) + self.assertEqual(sorted(field_pair), sorted(publication.keys())) + + def _read_publication_without_specific_fields(self): + """Read a publication by its href excluding specific fields.""" + # requests doesn't allow the use of != in parameters. + url = "{}?exclude_fields=distributions".format(self.publication["pulp_href"]) + publication = self.client.get(url) + self.assertNotIn("distributions", publication.keys()) + + def _read_publications_filter_repo_version(self): + """Read a publication by its repository version.""" + publications = self.client.get( + FILE_PUBLICATION_PATH, params={"repository_version": self.repo["latest_version_href"]} + ) + self.assertEqual(len(publications), 1, publications) + for key, val in self.publication.items(): + with self.subTest(key=key): + self.assertEqual(publications[0][key], val) + + def _read_publications_filter_repo_version_no_match(self): + """Filter by repo version for which no publication exists.""" + publications = self.client.get( + FILE_PUBLICATION_PATH, params={"repository_version": self.repo_initial_version} + ) + self.assertFalse(publications) + + def _read_publications_filter_repo_version_invalid(self): + """Filter by a repo version that does not exist.""" + invalid_repo_version = self.repo["versions_href"] + "123456789/" + response = self.client_echo.get( + FILE_PUBLICATION_PATH, params={"repository_version": invalid_repo_version} + ) + self.assertEqual(response.status_code, 400) + self.assertIn("not found for repositoryversion", response.text) + + def _read_publications_filter_created_time(self): + """Read a publication by its created time.""" + publications = self.client.get( + FILE_PUBLICATION_PATH, 
            params={"pulp_created": self.publication["pulp_created"]}
        )
        self.assertEqual(len(publications), 1, publications)
        for key, val in self.publication.items():
            with self.subTest(key=key):
                self.assertEqual(publications[0][key], val)

    def _read_publications_filter_created_time_no_match(self):
        """Filter for created time for which no publication exists."""
        # The repository predates its publication, so filtering publications by the
        # repo's creation timestamp must come back empty.
        publications = self.client.get(
            FILE_PUBLICATION_PATH, params={"pulp_created": self.repo["pulp_created"]}
        )
        self.assertFalse(publications)

    def _publication_create_order(self):
        """Assert that publications are ordered by created time."""
        # Create 2 more publications for the same repo (3 total with the earlier one).
        for _ in range(2):
            create_file_publication(self.cfg, self.repo)

        # Read publications
        publications = self.client.get(FILE_PUBLICATION_PATH)
        self.assertEqual(len(publications), 3)

        # Assert publications are ordered by pulp_created field in descending order
        for i, publication in enumerate(publications[:-1]):
            self.assertGreater(
                parse_date_from_string(publication["pulp_created"]),  # Current
                parse_date_from_string(publications[i + 1]["pulp_created"]),  # Prev
            )

    def _delete(self):
        """Delete a publication."""
        self.client.delete(self.publication["pulp_href"])
        with self.assertRaises(HTTPError):
            self.client.get(self.publication["pulp_href"])


class PublicationRepositoryParametersTestCase(unittest.TestCase):
    """Explore publication creation using repository and repository version."""

    @classmethod
    def setUpClass(cls):
        """Create class-wide variables."""
        cls.cfg = config.get_config()
        cls.client = api.Client(cls.cfg)

    def test_create_only_using_repoversion(self):
        """Create a publication only using repository version."""
        repo = self.create_sync_repo()
        # Remove a unit so that a second (non-latest) repository version exists.
        for file_content in get_content(repo)[FILE_CONTENT_NAME]:
            modify_repo(self.cfg, repo, remove_units=[file_content])
        version_href = self.client.get(repo["versions_href"])[1]["pulp_href"]
        publication = create_file_publication(self.cfg, repo, version_href)
        self.addCleanup(self.client.delete, publication["pulp_href"])

        self.assertEqual(publication["repository_version"], version_href, publication)

    def test_create_repo_repoversion(self):
        """Create a publication using repository and repository version.

        Passing both parameters at once is invalid and must be rejected with 400.
        """
        repo = self.create_sync_repo()
        version_href = self.client.get(repo["versions_href"])[0]["pulp_href"]

        with self.assertRaises(HTTPError) as ctx:
            self.client.using_handler(api.json_handler).post(
                FILE_PUBLICATION_PATH,
                {"repository_version": version_href, "repository": repo["pulp_href"]},
            )

        # The error message should mention both parameters and that they are exclusive.
        for key in ("repository", "repository_version", "not", "both"):
            self.assertIn(
                key,
                ctx.exception.response.json()["non_field_errors"][0].lower(),
                ctx.exception.response,
            )

    def create_sync_repo(self):
        """Create a repository, sync it once against a new remote, and return it."""
        repo = self.client.post(FILE_REPO_PATH, gen_repo())
        self.addCleanup(self.client.delete, repo["pulp_href"])

        remote = self.client.post(FILE_REMOTE_PATH, gen_file_remote())
        self.addCleanup(self.client.delete, remote["pulp_href"])

        sync(self.cfg, remote, repo)
        return self.client.get(repo["pulp_href"])
diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_crud_repos.py b/pulp_file/tests/functional/api/from_pulpcore/test_crud_repos.py
new file mode 100644
index 000000000..915cbafb6
--- /dev/null
+++ b/pulp_file/tests/functional/api/from_pulpcore/test_crud_repos.py
@@ -0,0 +1,458 @@
"""Tests that CRUD repositories."""
import json
import re
import time
import unittest
from itertools import permutations
from urllib.parse import urljoin

from pulp_smash import api, cli, config, utils
from pulp_smash.pulp3.bindings import monitor_task
from pulp_smash.pulp3.utils import gen_repo

from requests.exceptions import HTTPError
import pytest

from pulpcore.client.pulp_file.exceptions import ApiException
from pulpcore.client.pulp_file import (
    ApiClient as FileApiClient,
    FileFileRemote,
    RemotesFileApi,
)

from pulp_file.tests.functional.utils import gen_file_remote
from .constants import (
    FILE_FIXTURE_MANIFEST_URL,
    FILE_REMOTE_PATH,
    FILE_REPO_PATH,
)


class CRUDRepoTestCase(unittest.TestCase):
    """CRUD repositories."""

    @classmethod
    def setUpClass(cls):
        """Create class-wide variables."""
        cls.cfg = config.get_config()
        cls.client = api.Client(cls.cfg, api.json_handler)

    def setUp(self):
        # Reset per-test repo state; populated by _create_repo().
        self.repo = {}

    def test_workflow(self):
        # The sub-steps share self.repo, so they must run in this exact order.
        self._create_repo()
        self._create_same_name()
        self._read_repo()
        self._read_repo_with_specific_fields()
        self._read_repo_without_specific_fields()
        self._read_repos()
        self._read_all_repos()
        self._fully_update_name()
        self._fully_update_desc()
        self._partially_update_name()
        self._partially_update_desc()
        self._set_remote_on_repository()
        self._delete_repo()

    def _create_repo(self):
        """Create repository."""
        self.repo = self.client.post(FILE_REPO_PATH, gen_repo())

    def _create_same_name(self):
        """Try to create a second repository with an identical name."""
        with self.assertRaises(HTTPError) as exc:
            self.client.post(FILE_REPO_PATH, gen_repo(name=self.repo["name"]))
        self.assertIn("unique", exc.exception.response.text)
        self.assertEqual(exc.exception.response.status_code, 400)

    def _read_repo(self):
        """Read a repository by its href."""
        repo = self.client.get(self.repo["pulp_href"])
        for key, val in self.repo.items():
            with self.subTest(key=key):
                self.assertEqual(repo[key], val)

    def _read_repo_with_specific_fields(self):
        """Read a repository by its href providing specific field list.

        Permute field list to ensure different combinations on result.
        """
        fields = (
            "pulp_href",
            "pulp_created",
            "versions_href",
            "latest_version_href",
            "name",
            "description",
        )
        for field_pair in permutations(fields, 2):
            # ex: field_pair = ('pulp_href', 'created')
            with self.subTest(field_pair=field_pair):
                repo = self.client.get(
                    self.repo["pulp_href"], params={"fields": ",".join(field_pair)}
                )
                # Only the requested fields may appear in the response.
                self.assertEqual(sorted(field_pair), sorted(repo.keys()))

    def _read_repo_without_specific_fields(self):
        """Read a repo by its href excluding specific fields."""
        # requests doesn't allow the use of != in parameters.
        url = "{}?exclude_fields=created,name".format(self.repo["pulp_href"])
        repo = self.client.get(url)
        response_fields = repo.keys()
        self.assertNotIn("created", response_fields)
        self.assertNotIn("name", response_fields)

    def _read_repos(self):
        """Read the repository by its name."""
        page = self.client.get(FILE_REPO_PATH, params={"name": self.repo["name"]})
        self.assertEqual(len(page["results"]), 1)
        for key, val in self.repo.items():
            with self.subTest(key=key):
                self.assertEqual(page["results"][0][key], val)

    def _read_all_repos(self):
        """Ensure name is displayed when listing repositories."""
        for repo in self.client.get(FILE_REPO_PATH)["results"]:
            self.assertIsNotNone(repo["name"])

    def _fully_update_name(self):
        """Update a repository's name using HTTP PUT."""
        self._do_fully_update_attr("name")

    def _fully_update_desc(self):
        """Update a repository's description using HTTP PUT."""
        self._do_fully_update_attr("description")

    def _do_fully_update_attr(self, attr):
        """Update a repository attribute using HTTP PUT.

        :param attr: The name of the attribute to update. For example,
            "description." The attribute to update must be a string.
        """
        repo = self.client.get(self.repo["pulp_href"])
        string = utils.uuid4()
        repo[attr] = string
        self.client.put(repo["pulp_href"], repo)

        # verify the update
        repo = self.client.get(repo["pulp_href"])
        self.assertEqual(string, repo[attr])

    def _partially_update_name(self):
        """Update a repository's name using HTTP PATCH."""
        self._do_partially_update_attr("name")

    def _partially_update_desc(self):
        """Update a repository's description using HTTP PATCH."""
        self._do_partially_update_attr("description")

    def _do_partially_update_attr(self, attr):
        """Update a repository attribute using HTTP PATCH.

        :param attr: The name of the attribute to update. For example,
            "description." The attribute to update must be a string.
        """
        string = utils.uuid4()
        self.client.patch(self.repo["pulp_href"], {attr: string})

        # verify the update
        repo = self.client.get(self.repo["pulp_href"])
        self.assertEqual(repo[attr], string)

    def _set_remote_on_repository(self):
        """Test setting remotes on repositories."""
        body = gen_file_remote()
        remote = self.client.post(FILE_REMOTE_PATH, body)
        self.addCleanup(self.client.delete, remote["pulp_href"])

        # verify that syncing with no remote raises an error
        with self.assertRaises(HTTPError):
            self.client.post(urljoin(self.repo["pulp_href"], "sync/"))

        # test setting the remote on the repo
        self.client.patch(self.repo["pulp_href"], {"remote": remote["pulp_href"]})

        # test syncing without a remote
        self.client.post(urljoin(self.repo["pulp_href"], "sync/"))

        repo = self.client.get(self.repo["pulp_href"])
        self.assertEqual(repo["latest_version_href"], f"{repo['pulp_href']}versions/1/")

    def _delete_repo(self):
        """Delete a repository."""
        self.client.delete(self.repo["pulp_href"])

        # verify the delete
        with self.assertRaises(HTTPError):
            self.client.get(self.repo["pulp_href"])

    def test_negative_create_repo_with_invalid_parameter(self):
        """Attempt to create repository passing extraneous invalid parameter.

        Assert response returns an error 400 including ["Unexpected field"].
        """
        response = api.Client(self.cfg, api.echo_handler).post(FILE_REPO_PATH, gen_repo(foo="bar"))
        assert response.status_code == 400
        assert response.json()["foo"] == ["Unexpected field"]


class CRUDRemoteTestCase(unittest.TestCase):
    """CRUD remotes."""

    @classmethod
    def setUpClass(cls):
        """Create class-wide variables."""
        cls.cfg = config.get_config()
        cls.client = FileApiClient(cls.cfg.get_bindings_config())
        cls.remotes_api = RemotesFileApi(cls.client)

    def setUp(self):
        # A fresh remote per test; tearDown deletes it (unless test_delete already did).
        self.remote_attrs = {
            "name": utils.uuid4(),
            "url": FILE_FIXTURE_MANIFEST_URL,
            "ca_cert": None,
            "client_cert": None,
            "client_key": None,
            "tls_validation": False,
            "proxy_url": None,
            "username": "pulp",
            "password": "pulp",
            "download_concurrency": 10,
            "policy": "on_demand",
            "total_timeout": None,
            "connect_timeout": None,
            "sock_connect_timeout": None,
            "sock_read_timeout": None,
        }
        self.remote = self.remotes_api.create(self.remote_attrs)

    def tearDown(self):
        try:
            response = self.remotes_api.delete(self.remote.pulp_href)
        except ApiException as exc:
            # The test_delete test will cause this to not be here
            assert exc.status == 404
        else:
            monitor_task(response.task)

    def _compare_results(self, data, received):
        """Assert every key in ``data`` round-tripped onto the ``received`` remote."""
        self.assertFalse(hasattr(received, "password"))

        # handle write only fields
        data.pop("username", None)
        data.pop("password", None)
        data.pop("client_key", None)

        for k in data:
            self.assertEqual(getattr(received, k), data[k])

    def test_read(self):
        # Compare initial-attrs vs remote created in setUp
        self._compare_results(self.remote_attrs, self.remote)

    def test_update(self):
        data = {"download_concurrency": 23, "policy": "immediate"}
        self.remotes_api.partial_update(self.remote.pulp_href, data)
        # NOTE(review): sleep works around a read-after-write race — TODO replace
        # with monitor_task on the partial_update task if possible.
        time.sleep(1)  # without this, the read returns the pre-patch values
        new_remote = self.remotes_api.read(self.remote.pulp_href)
        self._compare_results(data, new_remote)

    def test_password_writeable(self):
        """Test that a password can be updated with a PUT request."""
        cli_client = cli.Client(self.cfg)
        remote = self.remotes_api.create({"name": "test_pass", "url": "http://", "password": "new"})
        href = remote.pulp_href
        uuid = re.search(r"/api/v3/remotes/file/file/([\w-]+)/", href).group(1)
        # The password is write-only over the API, so read it back via a manage shell.
        shell_cmd = (
            f"import pulpcore; print(pulpcore.app.models.Remote.objects.get(pk='{uuid}').password)"
        )

        self.addCleanup(self.remotes_api.delete, href)

        # test a PUT request with a new password
        remote_update = FileFileRemote(name="test_pass", url="http://", password="changed")
        response = self.remotes_api.update(href, remote_update)
        monitor_task(response.task)
        exc = cli_client.run(["pulpcore-manager", "shell", "-c", shell_cmd])
        self.assertEqual(exc.stdout.rstrip("\n"), "changed")

    def test_password_not_unset(self):
        """Test that password doesn't get unset when not passed with a PUT request."""
        cli_client = cli.Client(self.cfg)
        remote = self.remotes_api.create({"name": "test_pass", "url": "http://", "password": "new"})
        href = remote.pulp_href
        uuid = re.search(r"/api/v3/remotes/file/file/([\w-]+)/", href).group(1)
        shell_cmd = (
            f"import pulpcore; print(pulpcore.app.models.Remote.objects.get(pk='{uuid}').password)"
        )

        self.addCleanup(self.remotes_api.delete, href)

        # test a PUT request without a password
        # NOTE(review): the PUT uses name "pass_test" while the remote was created as
        # "test_pass" — this also renames the remote; confirm that is intentional.
        remote_update = FileFileRemote(name="pass_test", url="http://")
        response = self.remotes_api.update(href, remote_update)
        monitor_task(response.task)
        exc = cli_client.run(["pulpcore-manager", "shell", "-c", shell_cmd])
        self.assertEqual(exc.stdout.rstrip("\n"), "new")

    def test_timeout_attributes(self):
        # Test valid timeout settings (float >= 0)
        data = {
            "total_timeout": 1.0,
            "connect_timeout": 66.0,
            "sock_connect_timeout": 0.0,
            "sock_read_timeout": 3.1415926535,
        }
        self.remotes_api.partial_update(self.remote.pulp_href, data)
        # NOTE(review): sleep works around the same read-after-write race as test_update.
        time.sleep(1)
        new_remote = self.remotes_api.read(self.remote.pulp_href)
        self._compare_results(data, new_remote)

    def test_timeout_attributes_float_lt_zero(self):
        # Test invalid float < 0
        data = {
            "total_timeout": -1.0,
        }
        with self.assertRaises(ApiException):
            self.remotes_api.partial_update(self.remote.pulp_href, data)

    def test_timeout_attributes_non_float(self):
        # Test invalid non-float
        data = {
            "connect_timeout": "abc",
        }
        with self.assertRaises(ApiException):
            self.remotes_api.partial_update(self.remote.pulp_href, data)

    def test_timeout_attributes_reset_to_empty(self):
        # Test reset to empty
        data = {
            "total_timeout": False,
            "connect_timeout": None,
            "sock_connect_timeout": False,
            "sock_read_timeout": None,
        }
        response = self.remotes_api.partial_update(self.remote.pulp_href, data)
        monitor_task(response.task)
        new_remote = self.remotes_api.read(self.remote.pulp_href)
        self._compare_results(data, new_remote)

    def test_delete(self):
        response = self.remotes_api.delete(self.remote.pulp_href)
        monitor_task(response.task)
        # verify the delete
        with self.assertRaises(ApiException):
            self.remotes_api.read(self.remote.pulp_href)

    def test_headers(self):
        # Test that headers value must be a list of dicts
        data = {"headers": {"Connection": "keep-alive"}}
        with self.assertRaises(ApiException):
            self.remotes_api.partial_update(self.remote.pulp_href, data)
        data = {"headers": [1, 2, 3]}
        with self.assertRaises(ApiException):
            self.remotes_api.partial_update(self.remote.pulp_href, data)
        data = {"headers": [{"Connection": "keep-alive"}]}
        self.remotes_api.partial_update(self.remote.pulp_href, data)


@pytest.mark.parallel
class CreatePulpLabelsRemoteTestCase(unittest.TestCase):
    """A test case for verifying whether pulp_labels are correctly assigned to a new remote."""

    @classmethod
    def setUpClass(cls):
        """Initialize class-wide variables."""
        cls.cfg = config.get_config()

        cls.api_client = api.Client(cls.cfg, api.json_handler)
        cls.file_client = FileApiClient(cls.cfg.get_bindings_config())
        cls.remotes_api = RemotesFileApi(cls.file_client)

        cls.pulp_labels = {"environment": "dev"}

    def test_create_remote(self):
        """Test if a created remote contains pulp_labels when passing JSON data."""
        remote_attrs = {
            "name": utils.uuid4(),
            "url": FILE_FIXTURE_MANIFEST_URL,
            "pulp_labels": self.pulp_labels,
        }
        remote = self.remotes_api.create(remote_attrs)
        self.addCleanup(self.remotes_api.delete, remote.pulp_href)

        self.assertEqual(remote.pulp_labels, self.pulp_labels)

    def test_create_remote_using_form(self):
        """Test if a created remote contains pulp_labels when passing form data."""
        remote_attrs = {
            "name": utils.uuid4(),
            "url": FILE_FIXTURE_MANIFEST_URL,
            # Form data cannot carry a nested dict, so labels are JSON-encoded.
            "pulp_labels": json.dumps(self.pulp_labels),
        }
        remote = self.api_client.post(FILE_REMOTE_PATH, data=remote_attrs)
        self.addCleanup(self.remotes_api.delete, remote["pulp_href"])
        self.assertEqual(remote["pulp_labels"], self.pulp_labels)


@pytest.mark.parallel
class RemoteFileURLsValidationTestCase(unittest.TestCase):
    """A test case that verifies the validation of remotes' URLs."""

    @classmethod
    def setUpClass(cls):
        """Initialize class-wide variables."""
        cls.cfg = config.get_config()

        cls.api_client = api.Client(cls.cfg, api.json_handler)
        cls.file_client = FileApiClient(cls.cfg.get_bindings_config())
        cls.remotes_api = RemotesFileApi(cls.file_client)

    def test_invalid_absolute_pathname(self):
        """Test the validation of an invalid absolute pathname."""
        remote_attrs = {
            "name": utils.uuid4(),
            "url": "file://error/path/name",
        }
        self.raise_for_invalid_request(remote_attrs)

    def test_invalid_import_path(self):
        """Test the validation of an invalid import pathname."""
        remote_attrs = {
            "name": utils.uuid4(),
            "url": "file:///error/path/name",
        }
        self.raise_for_invalid_request(remote_attrs)

    def raise_for_invalid_request(self, remote_attrs):
        """Check if Pulp returns HTTP 400 after issuing an invalid request."""
        with self.assertRaises(ApiException) as ae:
            remote = self.remotes_api.create(remote_attrs)
            # Only reached if creation unexpectedly succeeded.
            self.addCleanup(self.remotes_api.delete, remote.pulp_href)

        self.assertEqual(ae.exception.status, 400)

    def test_valid_import_path(self):
        """Test the creation of a remote after passing a valid URL."""
        remote_attrs = {
            "name": utils.uuid4(),
            "url": "file:///tmp/good",
        }

        remote = self.remotes_api.create(remote_attrs)
        self.addCleanup(self.remotes_api.delete, remote.pulp_href)

    def test_no_username_password(self):
        """Test that the remote url can't contain username/password."""
        remote_attrs = {
            "name": utils.uuid4(),
            "url": "http://elladan@rivendell.org",
        }
        self.raise_for_invalid_request(remote_attrs)

        remote_attrs = {
            "name": utils.uuid4(),
            "url": "http://elladan:pass@rivendell.org",
        }
        self.raise_for_invalid_request(remote_attrs)
diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_distributions.py b/pulp_file/tests/functional/api/from_pulpcore/test_distributions.py
new file mode 100644
index 000000000..626794c60
--- /dev/null
+++ b/pulp_file/tests/functional/api/from_pulpcore/test_distributions.py
@@ -0,0 +1,513 @@
"""Tests that perform actions over distributions."""
import csv
import hashlib
import pytest
from time import sleep
import unittest
from urllib.parse import urljoin

from pulp_smash import api, cli, config, utils
from pulp_smash.pulp3.bindings import delete_orphans, monitor_task
from pulp_smash.pulp3.utils import (
    download_content_unit,
    download_content_unit_return_requests_response,
    gen_distribution,
    gen_repo,
    get_content,
    get_versions,
    modify_repo,
    sync,
    utils as pulp3_utils,
)
from requests.exceptions import HTTPError

from pulpcore.client.pulpcore import ApiException, StatusApi

from pulpcore.client.pulp_file import (
    ContentFilesApi,
    DistributionsFileApi,
    FileFilePublication,
    PublicationsFileApi,
    RemotesFileApi,
    RepositoriesFileApi,
    RepositorySyncURL,
)

from pulp_file.tests.functional.utils import (
    create_file_publication,
    gen_file_remote,
    gen_file_client,
    gen_pulpcore_client,
)
from .constants import (
    BASE_DISTRIBUTION_PATH,
    FILE_CHUNKED_FIXTURE_MANIFEST_URL,
    FILE_CONTENT_NAME,
    FILE_DISTRIBUTION_PATH,
    FILE_FIXTURE_COUNT,
    FILE_REMOTE_PATH,
    FILE_URL,
    FILE_REPO_PATH,
)


class CRUDPublicationDistributionTestCase(unittest.TestCase):
    """CRUD Publication Distribution."""

    @classmethod
    def setUpClass(cls):
        """Create class-wide variables."""
        cls.cfg = config.get_config()
        cls.client = api.Client(cls.cfg)

    def setUp(self):
        """Arrange the test."""
        # Attributes exercised by the PATCH/PUT update steps.
        self.attr = (
            "name",
            "base_path",
        )
        self.distribution = {}
        self.publication = {}
        self.remote = {}
        self.repo = {}

    def tearDown(self):
        """Clean variables."""
        for resource in (self.publication, self.remote, self.repo):
            if resource:
                self.client.delete(resource["pulp_href"])

    def test_crud_workflow(self):
        # The sub-steps share state; they must run in this exact order.
        self._create()
        self._read()
        self._partially_update()
        self._fully_update()
        self._list()
        self._delete_distribution()

    def _create(self):
        """Create a publication distribution.

        Do the following:

        1. Create a repository and 3 repository versions with at least 1 file
           content in it. Create a publication using the second repository
           version.
        2. Create a distribution with 'publication' field set to
           the publication from step (1).
        3. Assert the distribution got created correctly with the correct
           base_path, name, and publication. Assert that content guard is
           unset.
        4. Assert that publication has a 'distributions' reference to the
           distribution (it's backref).

        """
        self.repo.update(self.client.post(FILE_REPO_PATH, gen_repo()))
        self.remote.update(self.client.post(FILE_REMOTE_PATH, gen_file_remote()))
        # create 3 repository versions
        sync(self.cfg, self.remote, self.repo)
        self.repo = self.client.get(self.repo["pulp_href"])
        for file_content in get_content(self.repo)[FILE_CONTENT_NAME]:
            modify_repo(self.cfg, self.repo, remove_units=[file_content])

        self.repo = self.client.get(self.repo["pulp_href"])

        versions = get_versions(self.repo)

        self.publication.update(
            create_file_publication(self.cfg, self.repo, versions[1]["pulp_href"])
        )

        self.distribution.update(
            self.client.post(
                FILE_DISTRIBUTION_PATH, gen_distribution(publication=self.publication["pulp_href"])
            )
        )

        self.publication = self.client.get(self.publication["pulp_href"])

        # content_guard and repository parameters unset.
        for key, val in self.distribution.items():
            if key in ["content_guard", "repository"]:
                self.assertIsNone(val, self.distribution)
            else:
                self.assertIsNotNone(val, self.distribution)

        self.assertEqual(
            self.distribution["publication"], self.publication["pulp_href"], self.distribution
        )

        self.assertEqual(
            self.publication["distributions"][0], self.distribution["pulp_href"], self.publication
        )

    def _read(self):
        """Read distribution by its href."""
        distribution = self.client.get(self.distribution["pulp_href"])
        for key, val in self.distribution.items():
            with self.subTest(key=key):
                self.assertEqual(distribution[key], val)

    def _partially_update(self):
        """Update a distribution using PATCH."""
        for key in self.attr:
            with self.subTest(key=key):
                self._do_partially_update_attr(key)

    def _fully_update(self):
        """Update a distribution using PUT."""
        for key in self.attr:
            with self.subTest(key=key):
                self._do_fully_update_attr(key)

    def _list(self):
        """Test the generic distribution list endpoint."""
        distributions = self.client.get(BASE_DISTRIBUTION_PATH)
        assert self.distribution["pulp_href"] in [distro["pulp_href"] for distro in distributions]

    def _delete_distribution(self):
        """Delete a distribution."""
        self.client.delete(self.distribution["pulp_href"])
        with self.assertRaises(HTTPError):
            self.client.get(self.distribution["pulp_href"])

    def _do_fully_update_attr(self, attr):
        """Update a distribution attribute using HTTP PUT.

        :param attr: The name of the attribute to update.
        """
        distribution = self.client.get(self.distribution["pulp_href"])
        string = utils.uuid4()
        distribution[attr] = string
        self.client.put(distribution["pulp_href"], distribution)

        # verify the update
        distribution = self.client.get(distribution["pulp_href"])
        self.assertEqual(string, distribution[attr], distribution)

    def _do_partially_update_attr(self, attr):
        """Update a distribution using HTTP PATCH.

        :param attr: The name of the attribute to update.
        """
        string = utils.uuid4()
        self.client.patch(self.distribution["pulp_href"], {attr: string})

        # Verify the update
        distribution = self.client.get(self.distribution["pulp_href"])
        self.assertEqual(string, distribution[attr], self.distribution)


class DistributionBasePathTestCase(unittest.TestCase):
    """Test possible values for ``base_path`` on a distribution."""

    @classmethod
    def setUpClass(cls):
        """Create class-wide variables."""
        cls.cfg = config.get_config()
        cls.client = api.Client(cls.cfg)

    def setUp(self):
        """Set up resources."""
        body = gen_distribution()
        # Use a multi-segment base_path so overlap tests have segments to strip/extend.
        body["base_path"] = body["base_path"].replace("-", "/")
        self.distribution = self.client.post(FILE_DISTRIBUTION_PATH, body)

    def tearDown(self):
        """Clean up resources."""
        response = self.client.delete(self.distribution["pulp_href"])
        monitor_task(response["pulp_href"])

    def test_negative_create_using_spaces(self):
        """Test that spaces can not be part of ``base_path``."""
        self.try_create_distribution(base_path=utils.uuid4().replace("-", " "))
        self.try_update_distribution(base_path=utils.uuid4().replace("-", " "))

    def test_negative_create_using_begin_slash(self):
        """Test that slash cannot be in the begin of ``base_path``."""
        self.try_create_distribution(base_path="/" + utils.uuid4())
        self.try_update_distribution(base_path="/" + utils.uuid4())

    def test_negative_create_using_end_slash(self):
        """Test that slash cannot be in the end of ``base_path``."""
        self.try_create_distribution(base_path=utils.uuid4() + "/")
        self.try_update_distribution(base_path=utils.uuid4() + "/")

    def test_negative_create_using_non_unique_base_path(self):
        """Test that ``base_path`` can not be duplicated."""
        self.try_create_distribution(base_path=self.distribution["base_path"])

    def test_negative_create_using_overlapping_base_path(self):
        """Test that distributions can't have overlapping ``base_path``.

        See: `Pulp #2987`_.
        """
        # A prefix of an existing base_path must be rejected...
        base_path = self.distribution["base_path"].rsplit("/", 1)[0]
        self.try_create_distribution(base_path=base_path)

        # ...and so must an extension of an existing base_path.
        base_path = "/".join((self.distribution["base_path"], utils.uuid4().replace("-", "/")))
        self.try_create_distribution(base_path=base_path)

    def try_create_distribution(self, **kwargs):
        """Unsuccessfully create a distribution.

        Merge the given kwargs into the body of the request.
        """
        body = gen_distribution()
        body.update(kwargs)
        with self.assertRaises(HTTPError) as ctx:
            self.client.post(FILE_DISTRIBUTION_PATH, body)

        # The error payload must blame base_path specifically.
        self.assertIsNotNone(
            ctx.exception.response.json()["base_path"], ctx.exception.response.json()
        )

    def try_update_distribution(self, **kwargs):
        """Unsuccessfully update a distribution with HTTP PATCH.

        Use the given kwargs as the body of the request.
        """
        with self.assertRaises(HTTPError) as ctx:
            self.client.patch(self.distribution["pulp_href"], kwargs)

        self.assertIsNotNone(
            ctx.exception.response.json()["base_path"], ctx.exception.response.json()
        )


class ContentServePublicationDistributionTestCase(unittest.TestCase):
    """Verify that content is served from a publication distribution.

    Assert that published metadata and content is served from a publication
    distribution.

    This test targets the following issue:

    `Pulp #4847 <https://pulp.plan.io/issues/4847>`_
    """

    @classmethod
    def setUpClass(cls):
        """Create class-wide variables."""
        cls.cfg = config.get_config()
        cls.client = gen_file_client()

        cls.content_api = ContentFilesApi(cls.client)
        cls.repo_api = RepositoriesFileApi(cls.client)
        cls.remote_api = RemotesFileApi(cls.client)
        cls.publications_api = PublicationsFileApi(cls.client)
        cls.distributions_api = DistributionsFileApi(cls.client)

    def setUp(self):
        delete_orphans()

    def test_nonpublished_content_not_served(self):
        """Verify content that hasn't been published is not served."""
        self.setup_download_test("immediate", publish=False)
        files = ["", "1.iso", "2.iso", "3.iso"]
        for file in files:
            with self.assertRaises(HTTPError, msg=f"{file}") as cm:
                download_content_unit(self.cfg, self.distribution.to_dict(), file)
            self.assertEqual(cm.exception.response.status_code, 404, f"{file}")

    def test_content_served_on_demand(self):
        """Assert that on_demand content can be properly downloaded."""
        self.setup_download_test("on_demand")
        self.do_test_content_served()

    def test_content_served_immediate(self):
        """Assert that downloaded content can be properly downloaded."""
        self.setup_download_test("immediate")
        self.do_test_content_served()

    def test_content_served_streamed(self):
        """Assert that streamed content can be properly downloaded."""
        self.setup_download_test("streamed")
        self.do_test_content_served()

    def test_content_served_immediate_with_range_request_inside_one_chunk(self):
        """Assert that downloaded content can be properly downloaded with range requests."""
        self.setup_download_test("immediate", url=FILE_CHUNKED_FIXTURE_MANIFEST_URL)
        range_headers = {"Range": "bytes=1048586-1049586"}
        num_bytes = 1001
        self.do_range_request_download_test(range_headers, num_bytes)

    def test_content_served_immediate_with_range_request_over_three_chunks(self):
        """Assert that downloaded content can be properly downloaded with range requests."""
        self.setup_download_test("immediate", url=FILE_CHUNKED_FIXTURE_MANIFEST_URL)
        range_headers = {"Range": "bytes=1048176-2248576"}
        num_bytes = 1200401
        self.do_range_request_download_test(range_headers, num_bytes)

    def test_content_served_on_demand_with_range_request_over_three_chunks(self):
        """Assert that on_demand content can be properly downloaded with range requests."""
        self.setup_download_test("on_demand", url=FILE_CHUNKED_FIXTURE_MANIFEST_URL)
        range_headers = {"Range": "bytes=1048176-2248576"}
        num_bytes = 1200401
        self.do_range_request_download_test(range_headers, num_bytes)

    def test_content_served_streamed_with_range_request_over_three_chunks(self):
        """Assert that streamed content can be properly downloaded with range requests."""
        self.setup_download_test("streamed", url=FILE_CHUNKED_FIXTURE_MANIFEST_URL)
        range_headers = {"Range": "bytes=1048176-2248576"}
        num_bytes = 1200401
        self.do_range_request_download_test(range_headers, num_bytes)

    def test_content_served_immediate_with_multiple_different_range_requests(self):
        """Assert that multiple requests with different Range header values work as expected."""
        self.setup_download_test("immediate", url=FILE_CHUNKED_FIXTURE_MANIFEST_URL)
        range_headers = {"Range": "bytes=1048176-2248576"}
        num_bytes = 1200401
        self.do_range_request_download_test(range_headers, num_bytes)
        range_headers = {"Range": "bytes=2042176-3248576"}
        num_bytes = 1206401
        self.do_range_request_download_test(range_headers, num_bytes)

    def test_content_served_immediate_with_range_request_invalid_start_value(self):
        """Assert that range requests with a negative start value error as expected."""
        cfg = config.get_config()
        cli_client = cli.Client(cfg)
        storage = utils.get_pulp_setting(cli_client, "DEFAULT_FILE_STORAGE")
        if storage != "pulpcore.app.models.storage.FileSystem":
            self.skipTest("The S3 test API project doesn't handle invalid Range values correctly")
        self.setup_download_test("immediate", url=FILE_CHUNKED_FIXTURE_MANIFEST_URL)
        with self.assertRaises(HTTPError) as cm:
            download_content_unit_return_requests_response(
                self.cfg, self.distribution.to_dict(), "1.iso", headers={"Range": "bytes=-1-11"}
            )
        # 416 Range Not Satisfiable
        self.assertEqual(cm.exception.response.status_code, 416)

    def test_content_served_immediate_with_range_request_too_large_end_value(self):
        """Assert that a range request with an end value that is larger than the data works still."""
        self.setup_download_test("immediate", url=FILE_CHUNKED_FIXTURE_MANIFEST_URL)
        range_headers = {"Range": "bytes=10485260-10485960"}
        num_bytes = 500
        self.do_range_request_download_test(range_headers, num_bytes)

    def test_content_served_immediate_with_range_request_start_value_larger_than_content(self):
        """Assert that a range request with a start value larger than the content errors."""
        self.setup_download_test("immediate", url=FILE_CHUNKED_FIXTURE_MANIFEST_URL)
        with self.assertRaises(HTTPError) as cm:
            download_content_unit_return_requests_response(
                self.cfg,
                self.distribution.to_dict(),
                "1.iso",
                headers={"Range": "bytes=10485860-10485870"},
            )
        self.assertEqual(cm.exception.response.status_code, 416)

    @pytest.mark.skip("Sometimes PostgreSQL doesn't restart properly in CI.")
    def test_content_served_after_db_restart(self):
        """
        Assert that content can be downloaded after the database has been restarted.

        This test also checks that the HTML page with a list of distributions is also
        available after the connection to the database has been closed.
        """
        cfg = config.get_config()
        pulp_host = cfg.hosts[0]
        svc_mgr = cli.ServiceManager(cfg, pulp_host)
        # Service name pattern differs between s6 and other service managers.
        if svc_mgr._svc_mgr == "s6":
            postgresql_service_name = "postgresql"
        else:
            postgresql_service_name = "*postgresql*"
        postgresql_found = svc_mgr.is_active([postgresql_service_name])
        self.assertTrue(
            postgresql_found, "PostgreSQL service not found or is not active. Can't restart it."
        )
        svc_mgr.restart([postgresql_service_name])
        # Wait for postgres to come back and pulpcore-api to recover
        status_api = StatusApi(gen_pulpcore_client())
        for i in range(5):
            sleep(2)
            try:
                status_api.status_read()
                break
            except ApiException:
                if i == 4:
                    raise
        self.setup_download_test("immediate")
        self.do_test_content_served()
        url_fragments = [
            cfg.get_content_host_base_url(),
            "pulp/content",
        ]
        content_app_root = "/".join(url_fragments)
        pulp3_utils.http_get(content_app_root)

    def setup_download_test(self, policy, url=None, publish=True):
        """Create, sync, optionally publish, and distribute a repo for download tests."""
        # Create a repository
        self.repo = self.repo_api.create(gen_repo())
        self.addCleanup(self.repo_api.delete, self.repo.pulp_href)

        # Create a remote
        remote_options = {"policy": policy}
        if url:
            remote_options["url"] = url

        self.remote = self.remote_api.create(gen_file_remote(**remote_options))
        self.addCleanup(self.remote_api.delete, self.remote.pulp_href)

        # Sync the repository.
        repository_sync_data = RepositorySyncURL(remote=self.remote.pulp_href)
        sync_response = self.repo_api.sync(self.repo.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)

        if publish:
            # Create a publication.
            publish_data = FileFilePublication(repository=self.repo.pulp_href)
            publish_response = self.publications_api.create(publish_data)
            publication_href = monitor_task(publish_response.task).created_resources[0]
            self.addCleanup(self.publications_api.delete, publication_href)
            serve, served_href = "publication", publication_href
        else:
            serve, served_href = "repository", self.repo.pulp_href

        # Create a distribution.
        response = self.distributions_api.create(gen_distribution(**{serve: served_href}))
        distribution_href = monitor_task(response.task).created_resources[0]
        self.distribution = self.distributions_api.read(distribution_href)
        self.addCleanup(self.distributions_api.delete, self.distribution.pulp_href)

    def do_test_content_served(self):
        """Download a unit twice and compare both downloads against the fixture hash."""
        file_path = "1.iso"

        req1 = download_content_unit(self.cfg, self.distribution.to_dict(), file_path)
        req2 = download_content_unit(self.cfg, self.distribution.to_dict(), file_path)
        fixtures_hash = hashlib.sha256(utils.http_get(urljoin(FILE_URL, file_path))).hexdigest()

        first_dl_hash = hashlib.sha256(req1).hexdigest()
        second_dl_hash = hashlib.sha256(req2).hexdigest()

        self.assertEqual(first_dl_hash, fixtures_hash)
        self.assertEqual(first_dl_hash, second_dl_hash)

        manifest = download_content_unit(self.cfg, self.distribution.to_dict(), "PULP_MANIFEST")
        pulp_manifest = list(
            csv.DictReader(manifest.decode("utf-8").splitlines(), ("name", "checksum", "size"))
        )

        self.assertEqual(len(pulp_manifest), FILE_FIXTURE_COUNT, pulp_manifest)

    def do_range_request_download_test(self, range_header, expected_bytes):
        """Issue the same Range request twice and assert identical 206 responses."""
        # NOTE(review): "req1_reponse" is misspelled (should be req1_response) —
        # consistent within this method, so behavior is unaffected.
        file_path = "1.iso"

        req1_reponse = download_content_unit_return_requests_response(
            self.cfg, self.distribution.to_dict(), file_path, headers=range_header
        )
        req2_response = download_content_unit_return_requests_response(
            self.cfg, self.distribution.to_dict(), file_path, headers=range_header
        )

        self.assertEqual(expected_bytes, len(req1_reponse.content))
        self.assertEqual(expected_bytes, len(req2_response.content))
        self.assertEqual(req1_reponse.content, req2_response.content)

        # 206 Partial Content
        self.assertEqual(req1_reponse.status_code, 206)
        self.assertEqual(req1_reponse.status_code, req2_response.status_code)

        self.assertEqual(str(expected_bytes), req1_reponse.headers["Content-Length"])
        self.assertEqual(str(expected_bytes), req2_response.headers["Content-Length"])

        self.assertEqual(
            req1_reponse.headers["Content-Range"], req2_response.headers["Content-Range"]
        )
diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_filesystemexport.py b/pulp_file/tests/functional/api/from_pulpcore/test_filesystemexport.py
new file mode 100644
index 000000000..2cdca7f14
--- /dev/null
+++ b/pulp_file/tests/functional/api/from_pulpcore/test_filesystemexport.py
@@ -0,0 +1,230 @@
"""
Tests FilesystemExporter and FilesystemExport functionality

NOTE: assumes ALLOWED_EXPORT_PATHS setting contains "/tmp" - all tests will fail if this is not
+""" +import unittest +from pulp_smash import api, cli, config +from pulp_smash.utils import uuid4 +from pulp_smash.pulp3.bindings import monitor_task +from pulp_smash.pulp3.utils import gen_repo + +from pulpcore.client.pulpcore.exceptions import ApiException + +from pulpcore.client.pulpcore import ( + ApiClient as CoreApiClient, + ExportersFilesystemApi, + ExportersFilesystemExportsApi, +) +from pulpcore.client.pulp_file import ( + ContentFilesApi, + FileFilePublication, + PublicationsFileApi, + RepositoriesFileApi, + RepositoriesFileVersionsApi, + RepositorySyncURL, + RemotesFileApi, +) + +from pulp_file.tests.functional.utils import ( + gen_file_client, + gen_file_remote, +) + +NUM_REPOS = 1 +NUM_EXPORTERS = 4 + + +class BaseExporterCase(unittest.TestCase): + """ + Base functionality for Exporter and Export test classes + + The export process isn't possible without repositories having been sync'd - arranging for + that to happen once per-class (instead of once-per-test) is the primary purpose of this parent + class. 
+ """ + + def _setup_repositories(self): + """Create and sync a number of repositories to be exported.""" + # create and remember a set of repo + repos = [] + remotes = [] + publications = [] + for r in range(NUM_REPOS): + repo = self.repo_api.create(gen_repo()) + remote = self.remote_api.create(gen_file_remote()) + + repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) + sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data) + monitor_task(sync_response.task) + + repo = self.repo_api.read(file_file_repository_href=repo.pulp_href) + publish_data = FileFilePublication(repository=repo.pulp_href) + publish_response = self.publication_api.create(publish_data) + created_resources = monitor_task(publish_response.task).created_resources + publication_href = created_resources[0] + publication = self.publication_api.read(publication_href) + + repos.append(repo) + remotes.append(remote) + publications.append(publication) + return repos, remotes, publications + + @classmethod + def setUpClass(cls): + """Create class-wide variables.""" + cls.cfg = config.get_config() + cls.client = api.Client(cls.cfg, api.json_handler) + cls.core_client = CoreApiClient(configuration=cls.cfg.get_bindings_config()) + cls.file_client = gen_file_client() + + cls.content_api = ContentFilesApi(cls.file_client) + cls.repo_api = RepositoriesFileApi(cls.file_client) + cls.versions_api = RepositoriesFileVersionsApi(cls.file_client) + cls.remote_api = RemotesFileApi(cls.file_client) + cls.publication_api = PublicationsFileApi(cls.file_client) + cls.exporter_api = ExportersFilesystemApi(cls.core_client) + cls.exports_api = ExportersFilesystemExportsApi(cls.core_client) + + def setUp(self): + """Arrange necessary objects.""" + self.repos, self.remotes, self.publications = self._setup_repositories() + + def tearDown(self): + """Clean up after ourselves.""" + for remote in self.remotes: + self.remote_api.delete(remote.pulp_href) + for repo in self.repos: + 
self.repo_api.delete(repo.pulp_href) + + def _delete_exporter(self, exporter): + """ + Utility routine to delete an exporter. + """ + cli_client = cli.Client(self.cfg) + cmd = ("rm", "-rf", exporter.path) + cli_client.run(cmd, sudo=True) + + result = self.exporter_api.delete(exporter.pulp_href) + monitor_task(result.task) + + def _create_exporter(self, params={}): + """ + Utility routine to create an exporter for the available repositories. + """ + body = { + "name": uuid4(), + "path": "/tmp/{}/".format(uuid4()), + } + body.update(params) + + exporter = self.exporter_api.create(body) + self.addCleanup(self._delete_exporter, exporter) + return exporter, body + + +class FilesystemExporterTestCase(BaseExporterCase): + """Test FilesystemExporter CURDL methods.""" + + def test_workflow(self): + self._create() + self._read() + self._partial_update() + self._list() + self._delete() + self._method() + + def _create(self): + """Create a FilesystemExporter.""" + exporter, body = self._create_exporter() + self.assertEqual(body["name"], exporter.name) + self.assertEqual(body["path"], exporter.path) + + def _read(self): + """Read a created FilesystemExporter.""" + exporter_created, body = self._create_exporter() + exporter_read = self.exporter_api.read(exporter_created.pulp_href) + self.assertEqual(exporter_created.name, exporter_read.name) + self.assertEqual(exporter_created.path, exporter_read.path) + + def _partial_update(self): + """Update a FilesystemExporter's path.""" + exporter_created, body = self._create_exporter() + body = {"path": "/tmp/{}".format(uuid4())} + result = self.exporter_api.partial_update(exporter_created.pulp_href, body) + monitor_task(result.task) + exporter_read = self.exporter_api.read(exporter_created.pulp_href) + self.assertNotEqual(exporter_created.path, exporter_read.path) + self.assertEqual(body["path"], exporter_read.path) + + def _list(self): + """Show a set of created FilesystemExporters.""" + starting_exporters = 
self.exporter_api.list().results + for x in range(NUM_EXPORTERS): + self._create_exporter() + ending_exporters = self.exporter_api.list().results + self.assertEqual(NUM_EXPORTERS, len(ending_exporters) - len(starting_exporters)) + + def _delete(self): + """Delete a pulpExporter.""" + exporter = self.exporter_api.create({"name": "test", "path": "/tmp/abc"}) + result = self.exporter_api.delete(exporter.pulp_href) + monitor_task(result.task) + with self.assertRaises(ApiException) as ae: + self.exporter_api.read(exporter.pulp_href) + self.assertEqual(404, ae.exception.status) + + def _method(self): + """Test the method field.""" + exporter, _ = self._create_exporter({"method": "symlink"}) + self.assertEqual("symlink", exporter.method) + + with self.assertRaises(ApiException) as ae: + exporter = self._create_exporter({"method": "invalid"}) + self.assertEqual(400, ae.exception.status) + + +class FilesystemExportTestCase(BaseExporterCase): + """Test FilesystemExport CRDL methods (Update is not allowed).""" + + def _gen_export(self, exporter, publication): + """Create and read back an export for the specified FilesystemExporter.""" + body = {"publication": publication.pulp_href} + export_response = self.exports_api.create(exporter.pulp_href, body) + monitor_task(export_response.task) + + task = self.client.get(export_response.task) + resources = task["created_resources"] + self.assertEqual(1, len(resources)) + + return self.exports_api.read(resources[0]) + + def test_workflow(self): + self._export() + self._list() + self._delete() + + def _export(self): + """Issue and evaluate a FilesystemExport (tests both Create and Read).""" + exporter, body = self._create_exporter({"method": "write"}) + export = self._gen_export(exporter, self.publications[0]) + self.assertIsNotNone(export) + + def _list(self): + """Find all the FilesystemExports for a FilesystemExporter.""" + exporter, body = self._create_exporter({"method": "write"}) + for i in range(NUM_REPOS): + 
self._gen_export(exporter, self.publications[i]) + exporter = self.exporter_api.read(exporter.pulp_href) + exports = self.exports_api.list(exporter.pulp_href).results + self.assertEqual(NUM_REPOS, len(exports)) + + def _delete(self): + """Test deleting exports for a FilesystemExporter.""" + exporter, body = self._create_exporter({"method": "write"}) + export = self._gen_export(exporter, self.publications[0]) + self.exports_api.delete(export.pulp_href) + with self.assertRaises(ApiException) as ae: + self.exports_api.read(export.pulp_href) + self.assertEqual(404, ae.exception.status) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_generic_list.py b/pulp_file/tests/functional/api/from_pulpcore/test_generic_list.py new file mode 100644 index 000000000..244bcbffe --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_generic_list.py @@ -0,0 +1,93 @@ +"""Tests that look at generic list endpoints.""" +import tempfile +import unittest + +from pulp_smash import config, utils +from pulp_smash.pulp3.bindings import monitor_task +from pulp_smash.pulp3.utils import gen_repo + +from pulpcore.client.pulpcore import ( + ApiClient as CoreApiClient, + ContentApi, + ContentguardsApi, + RepositoriesApi, +) +from pulpcore.client.pulp_file import ( + ApiClient as FileApiClient, + ContentFilesApi, + RepositoriesFileApi, +) + +from pulpcore.client.pulp_certguard import ( + ApiClient as CertGuardApiClient, + ContentguardsX509Api, +) + +from .constants import X509_CA_CERT_FILE_PATH + + +class GenericListTestCase(unittest.TestCase): + """Test generic list endpoints.""" + + @classmethod + def setUpClass(cls): + cls.cfg = config.get_config() + cls.file_repositories_api = RepositoriesFileApi( + FileApiClient(cls.cfg.get_bindings_config()) + ) + cls.file_content_api = ContentFilesApi(FileApiClient(cls.cfg.get_bindings_config())) + cls.cert_guards_api = ContentguardsX509Api( + CertGuardApiClient(cls.cfg.get_bindings_config()) + ) + + def setUp(self): + self.repo = 
self.file_repositories_api.create(gen_repo())
        # Upload one small content unit so the generic content list is non-empty.
        with tempfile.NamedTemporaryFile() as tmp_file:
            tmp_file.write(b"not empty")
            tmp_file.flush()
            monitor_task(
                self.file_content_api.create(relative_path=utils.uuid4(), file=tmp_file.name).task
            )

        # Create an X509 content guard from the bundled CA certificate fixture.
        with open(X509_CA_CERT_FILE_PATH, "r") as x509_ca_cert_data_file:
            x509_ca_cert_data = x509_ca_cert_data_file.read()

        self.content_guard = self.cert_guards_api.create(
            {"name": utils.uuid4(), "ca_certificate": x509_ca_cert_data}
        )

    def tearDown(self):
        # Repository deletion is async (returns a task); guard deletion is not.
        monitor_task(self.file_repositories_api.delete(self.repo.pulp_href).task)
        self.cert_guards_api.delete(self.content_guard.pulp_href)

    def test_read_generic_endpoints(self):
        # Run the three generic-list checks against the fixtures built in setUp.
        self._read_all_repos_generic()
        self._read_all_content_generic()
        self._read_all_content_guards_generic()

    def _read_all_repos_generic(self):
        """Ensure name is displayed when listing repositories generic."""
        repositories_api = RepositoriesApi(CoreApiClient(self.cfg.get_bindings_config()))

        response = repositories_api.list()
        self.assertNotEqual(response.count, 0)
        for repo in response.results:
            self.assertIsNotNone(repo.name)

    def _read_all_content_generic(self):
        """Ensure href is displayed when listing content generic."""
        content_api = ContentApi(CoreApiClient(self.cfg.get_bindings_config()))

        response = content_api.list()
        self.assertNotEqual(response.count, 0)
        for content in response.results:
            self.assertIsNotNone(content.pulp_href)

    def _read_all_content_guards_generic(self):
        """Ensure name is displayed when listing content guards generic."""
        content_guards_api = ContentguardsApi(CoreApiClient(self.cfg.get_bindings_config()))

        response = content_guards_api.list()
        self.assertNotEqual(response.count, 0)
        for content_guard in response.results:
            self.assertIsNotNone(content_guard.name)
diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_labels.py b/pulp_file/tests/functional/api/from_pulpcore/test_labels.py
new file mode 
100644 index 000000000..7f770a1f0 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_labels.py @@ -0,0 +1,213 @@ +import json +import unittest +from uuid import uuid4 + +from pulp_smash import config +from pulp_smash.pulp3.bindings import monitor_task + +from pulpcore.client.pulp_file import ( + ApiClient as FileApiClient, + RepositoriesFileApi, +) +from pulpcore.client.pulp_file.exceptions import ApiException + + +class BaseLabelTestCase(unittest.TestCase): + """Base class for label test classes.""" + + @classmethod + def setUpClass(cls): + """Create class-wide variables.""" + cls.cfg = config.get_config() + cls.repo = None + + def setUp(self): + """Create an API client.""" + self.client = FileApiClient(self.cfg.get_bindings_config()) + self.repo_api = RepositoriesFileApi(self.client) + + def _create_repo(self, labels={}): + attrs = {"name": str(uuid4())} + if labels: + attrs["pulp_labels"] = labels + self.repo = self.repo_api.create(attrs) + self.addCleanup(self.repo_api.delete, self.repo.pulp_href) + + +class CRUDLabelTestCase(BaseLabelTestCase): + """CRUD labels on repositories.""" + + @classmethod + def setUpClass(cls): + """Create class-wide variables.""" + cls.cfg = config.get_config() + cls.repo = None + + def setUp(self): + """Create an API client.""" + self.client = FileApiClient(self.cfg.get_bindings_config()) + self.repo_api = RepositoriesFileApi(self.client) + + def _create_repo(self, labels={}): + attrs = {"name": str(uuid4())} + if labels: + attrs["pulp_labels"] = labels + self.repo = self.repo_api.create(attrs) + self.addCleanup(self.repo_api.delete, self.repo.pulp_href) + + def test_create_repo_with_labels(self): + """Create repository with labels.""" + labels = {"maiar": "mithrandir"} + self._create_repo(labels) + self.assertEqual(labels, self.repo.pulp_labels) + + def test_add_repo_labels(self): + """Update repository with labels.""" + labels = {"maiar": "mithrandir", "valar": "varda"} + self._create_repo() + + resp = 
self.repo_api.partial_update(self.repo.pulp_href, {"pulp_labels": labels}) + monitor_task(resp.task) + self.repo = self.repo_api.read(self.repo.pulp_href) + self.assertEqual(labels, self.repo.pulp_labels) + + def test_update_repo_label(self): + """Test updating an existing label.""" + labels = {"valar": "varda"} + self._create_repo(labels) + + labels["valar"] = "manwe" + + resp = self.repo_api.partial_update(self.repo.pulp_href, {"pulp_labels": labels}) + monitor_task(resp.task) + self.repo = self.repo_api.read(self.repo.pulp_href) + self.assertEqual(labels, self.repo.pulp_labels) + + def test_unset_repo_label(self): + """Test unsetting a repo label.""" + labels = {"maiar": "mithrandir", "valar": "varda"} + self._create_repo(labels) + + labels.pop("valar") + resp = self.repo_api.partial_update(self.repo.pulp_href, {"pulp_labels": labels}) + monitor_task(resp.task) + self.repo = self.repo_api.read(self.repo.pulp_href) + self.assertEqual(1, len(self.repo.pulp_labels)) + self.assertEqual(labels, self.repo.pulp_labels) + + def test_remove_all_repo_labels(self): + """Test removing all labels.""" + labels = {"maiar": "mithrandir", "valar": "varda"} + self._create_repo(labels) + + resp = self.repo_api.partial_update(self.repo.pulp_href, {"pulp_labels": {}}) + monitor_task(resp.task) + self.repo = self.repo_api.read(self.repo.pulp_href) + self.assertEqual(0, len(self.repo.pulp_labels)) + self.assertEqual({}, self.repo.pulp_labels) + + def test_model_partial_update(self): + """Test that labels aren't unset accidentially with PATCH calls.""" + labels = {"maiar": "mithrandir", "valar": "varda"} + self._create_repo(labels) + + resp = self.repo_api.partial_update(self.repo.pulp_href, {"name": str(uuid4())}) + monitor_task(resp.task) + self.repo = self.repo_api.read(self.repo.pulp_href) + self.assertEqual(labels, self.repo.pulp_labels) + + def test_invalid_label_type(self): + """Test that label doesn't accept non-dicts""" + with self.assertRaises(ApiException) as ae: + 
self._create_repo("morgoth") # str instead of dict + + self.assertEqual(400, ae.exception.status) + self.assertTrue("pulp_labels" in json.loads(ae.exception.body)) + + def test_invalid_labels(self): + """Test that label keys and values are validated.""" + with self.assertRaises(ApiException) as ae: + self._create_repo({"@": "maia"}) + + self.assertEqual(400, ae.exception.status) + self.assertTrue("pulp_labels" in json.loads(ae.exception.body)) + + with self.assertRaises(ApiException) as ae: + self._create_repo({"arda": "eru,illuvata"}) + + self.assertEqual(400, ae.exception.status) + self.assertTrue("pulp_labels" in json.loads(ae.exception.body)) + + +class FilterLabelTestCase(BaseLabelTestCase): + """CRUD labels on repositories.""" + + @classmethod + def setUpClass(cls): + """Create class-wide variables.""" + cls.cfg = config.get_config() + cls.repo = None + + def setUp(self): + """Create an API client.""" + self.client = FileApiClient(self.cfg.get_bindings_config()) + self.repo_api = RepositoriesFileApi(self.client) + + def _filter_labels(self, pulp_label_select): + resp = self.repo_api.list(pulp_label_select=pulp_label_select) + return resp.results + + def test_label_select(self): + """Test removing all labels.""" + labels = {"environment": "production", "certified": "true"} + self._create_repo(labels) + labels = {"environment": "staging", "certified": "false"} + self._create_repo(labels) + labels = {} + self._create_repo(labels) + + repos = self._filter_labels("environment=production") + self.assertEqual(1, len(repos)) + + repos = self._filter_labels("environment!=production") + self.assertEqual(1, len(repos)) + + repos = self._filter_labels("environment") + self.assertEqual(2, len(repos)) + + repos = self._filter_labels("environment~prod") + self.assertEqual(1, len(repos)) + + repos = self._filter_labels("environment=production,certified=true") + self.assertEqual(1, len(repos)) + + repos = self._filter_labels("environment=production,certified!=false") + 
self.assertEqual(1, len(repos)) + + repos = self._filter_labels("!environment,certified=false") + self.assertEqual(0, len(repos)) + + def test_empty_blank_filter(self): + """Test filtering values with a blank string.""" + labels = {"environment": ""} + self._create_repo(labels) + + repos = self._filter_labels("environment=") + self.assertEqual(1, len(repos)) + + repos = self._filter_labels("environment~") + self.assertEqual(1, len(repos)) + + def test_invalid_label_select(self): + """Test removing all labels.""" + with self.assertRaises(ApiException) as ae: + self._filter_labels("") + self.assertEqual(400, ae.exception.status) + + with self.assertRaises(ApiException) as ae: + self._filter_labels("!environment=production") + self.assertEqual(400, ae.exception.status) + + with self.assertRaises(ApiException) as ae: + self._filter_labels("=bad filter") + self.assertEqual(400, ae.exception.status) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_orphans.py b/pulp_file/tests/functional/api/from_pulpcore/test_orphans.py new file mode 100644 index 000000000..141726eec --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_orphans.py @@ -0,0 +1,300 @@ +"""Tests that perform actions over orphan files.""" +import os +import unittest +from random import choice + +from pulp_smash import cli, config, utils +from pulp_smash.exceptions import CalledProcessError +from pulp_smash.pulp3.bindings import monitor_task +from pulp_smash.pulp3.utils import ( + delete_version, + gen_repo, + get_content, + get_versions, +) + +from pulpcore.client.pulpcore import ArtifactsApi +from pulpcore.client.pulpcore import OrphansApi, OrphansCleanupApi +from pulpcore.client.pulpcore.exceptions import ApiException +from pulpcore.client.pulp_file import ( + ApiClient, + ContentFilesApi, + RepositoriesFileApi, + RepositorySyncURL, + RemotesFileApi, +) + +from pulp_file.tests.functional.utils import configuration, gen_file_remote, gen_pulpcore_client +from .constants import 
FILE_CONTENT_NAME


class DeleteOrphansTestCase(unittest.TestCase):
    """Test whether orphan files can be cleaned up.

    An orphan artifact is an artifact that is not in any content units.
    An orphan content unit is a content unit that is not in any repository
    version.

    """

    @classmethod
    def setUpClass(cls):
        """Create class-wide variables."""
        cls.cfg = config.get_config()
        cls.api_client = ApiClient(configuration)
        cls.cli_client = cli.Client(cls.cfg)
        cls.core_client = gen_pulpcore_client()
        cls.orphans_api = OrphansApi(cls.core_client)
        cls.orphans_cleanup_api = OrphansCleanupApi(cls.core_client)
        # The storage backend and media root decide whether on-disk artifact
        # checks below are possible (only for the FileSystem backend).
        cls.storage = utils.get_pulp_setting(cls.cli_client, "DEFAULT_FILE_STORAGE")
        cls.media_root = utils.get_pulp_setting(cls.cli_client, "MEDIA_ROOT")
        cls.orphan_protection_time = utils.get_pulp_setting(
            cls.cli_client, "ORPHAN_PROTECTION_TIME"
        )

        # Start from a clean slate: purge any pre-existing orphans immediately
        # (protection time 0) so counts in the tests are deterministic.
        orphans_response = cls.orphans_cleanup_api.cleanup({"orphan_protection_time": 0})
        monitor_task(orphans_response.task)

    def test_clean_orphan_content_unit(self):
        """Test whether orphaned content units can be cleaned up."""
        repo_api = RepositoriesFileApi(self.api_client)
        remote_api = RemotesFileApi(self.api_client)

        repo = repo_api.create(gen_repo())
        self.addCleanup(repo_api.delete, repo.pulp_href)

        body = gen_file_remote()
        remote = remote_api.create(body)
        self.addCleanup(remote_api.delete, remote.pulp_href)

        # Sync the repository.
        self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
        sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)
        repo = repo_api.read(repo.pulp_href)
        content = choice(get_content(repo.to_dict())[FILE_CONTENT_NAME])

        # Create an orphan content unit.
        repo_api.modify(repo.pulp_href, dict(remove_content_units=[content["pulp_href"]]))

        artifacts_api = ArtifactsApi(self.core_client)

        if self.storage == "pulpcore.app.models.storage.FileSystem":
            # Verify that the artifact is present on disk.
            relative_path = artifacts_api.read(content["artifact"]).file
            artifact_path = os.path.join(self.media_root, relative_path)
            cmd = ("ls", artifact_path)
            self.cli_client.run(cmd, sudo=True)

        file_contents_api = ContentFilesApi(self.api_client)
        # Delete first repo version. The previous removed content unit will be
        # an orphan.
        delete_version(repo, get_versions(repo.to_dict())[1]["pulp_href"])
        content_units = file_contents_api.list().to_dict()["results"]
        content_units_href = [c["pulp_href"] for c in content_units]
        self.assertIn(content["pulp_href"], content_units_href)

        orphans_response = self.orphans_api.delete()
        monitor_task(orphans_response.task)

        content_units = file_contents_api.list().to_dict()["results"]
        content_units_href = [c["pulp_href"] for c in content_units]

        # With a non-zero ORPHAN_PROTECTION_TIME the unit may still be
        # protected, so only assert removal when the setting is 0.
        if self.orphan_protection_time == 0:
            self.assertNotIn(content["pulp_href"], content_units_href)

        if self.storage == "pulpcore.app.models.storage.FileSystem":
            # Verify that the artifact was removed from disk.
            # NOTE(review): the earlier ``ls`` ran with sudo=True but this one
            # does not — confirm the asymmetry is intended.
            with self.assertRaises(CalledProcessError):
                self.cli_client.run(cmd)

    def test_clean_orphan_artifact(self):
        """Test whether orphan artifacts units can be clean up."""
        repo_api = RepositoriesFileApi(self.api_client)
        repo = repo_api.create(gen_repo())
        self.addCleanup(repo_api.delete, repo.pulp_href)

        # Upload this very test file as an artifact; it belongs to no content
        # unit, so it is an orphan from the start.
        artifacts_api = ArtifactsApi(self.core_client)
        artifact = artifacts_api.create(file=__file__)

        if self.storage == "pulpcore.app.models.storage.FileSystem":
            cmd = ("ls", os.path.join(self.media_root, artifact.file))
            self.cli_client.run(cmd, sudo=True)

        orphans_response = self.orphans_api.delete()
        monitor_task(orphans_response.task)

        if self.orphan_protection_time == 0:
            with self.assertRaises(ApiException):
                artifacts_api.read(artifact.pulp_href)

        if self.storage == "pulpcore.app.models.storage.FileSystem":
            with self.assertRaises(CalledProcessError):
                self.cli_client.run(cmd)


class OrphansCleanUpTestCase(unittest.TestCase):
    """Test the orphan cleanup endpoint.

    An orphan artifact is an artifact that is not in any content units.
    An orphan content unit is a content unit that is not in any repository
    version.

    """

    @classmethod
    def setUpClass(cls):
        """Create class-wide variables."""
        cls.cfg = config.get_config()
        cls.api_client = ApiClient(configuration)
        cls.cli_client = cli.Client(cls.cfg)
        cls.core_client = gen_pulpcore_client()
        cls.orphans_cleanup_api = OrphansCleanupApi(cls.core_client)
        # Storage backend and media root decide whether on-disk checks run.
        cls.storage = utils.get_pulp_setting(cls.cli_client, "DEFAULT_FILE_STORAGE")
        cls.media_root = utils.get_pulp_setting(cls.cli_client, "MEDIA_ROOT")

        # Start from a clean slate: purge pre-existing orphans immediately.
        orphans_response = cls.orphans_cleanup_api.cleanup({"orphan_protection_time": 0})
        monitor_task(orphans_response.task)

    def test_clean_orphan_content_unit(self):
        """Test whether orphaned content units can be cleaned up."""
        repo_api = RepositoriesFileApi(self.api_client)
        remote_api = RemotesFileApi(self.api_client)

        repo = repo_api.create(gen_repo())
        self.addCleanup(repo_api.delete, repo.pulp_href)

        body = gen_file_remote()
        remote = remote_api.create(body)
        self.addCleanup(remote_api.delete, remote.pulp_href)

        # Sync the repository.
        self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
        sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)
        repo = repo_api.read(repo.pulp_href)
        content = choice(get_content(repo.to_dict())[FILE_CONTENT_NAME])

        # Create an orphan content unit.
        repo_api.modify(repo.pulp_href, dict(remove_content_units=[content["pulp_href"]]))

        artifacts_api = ArtifactsApi(self.core_client)

        if self.storage == "pulpcore.app.models.storage.FileSystem":
            # Verify that the artifact is present on disk.
            relative_path = artifacts_api.read(content["artifact"]).file
            artifact_path = os.path.join(self.media_root, relative_path)
            cmd = ("ls", artifact_path)
            self.cli_client.run(cmd, sudo=True)

        file_contents_api = ContentFilesApi(self.api_client)
        # Delete first repo version. The previous removed content unit will be
        # an orphan.
        delete_version(repo, get_versions(repo.to_dict())[1]["pulp_href"])
        content_units = file_contents_api.list().to_dict()["results"]
        content_units_href = [c["pulp_href"] for c in content_units]
        self.assertIn(content["pulp_href"], content_units_href)

        # A 10-second protection window must keep the fresh orphan alive.
        content_before_cleanup = file_contents_api.list().count
        orphans_response = self.orphans_cleanup_api.cleanup({"orphan_protection_time": 10})
        monitor_task(orphans_response.task)

        # assert content was not removed
        content_after_cleanup = file_contents_api.list().count
        self.assertEqual(content_after_cleanup, content_before_cleanup)

        # With protection time 0 the orphan must be removed.
        orphans_response = self.orphans_cleanup_api.cleanup({"orphan_protection_time": 0})
        monitor_task(orphans_response.task)

        content_units = file_contents_api.list().to_dict()["results"]
        content_units_href = [c["pulp_href"] for c in content_units]
        self.assertNotIn(content["pulp_href"], content_units_href)

        if self.storage == "pulpcore.app.models.storage.FileSystem":
            # Verify that the artifact was removed from disk.
            # NOTE(review): the earlier ``ls`` ran with sudo=True but this one
            # does not — confirm the asymmetry is intended.
            with self.assertRaises(CalledProcessError):
                self.cli_client.run(cmd)

    def test_clean_orphan_artifact(self):
        """Test whether orphan artifacts units can be clean up."""
        repo_api = RepositoriesFileApi(self.api_client)
        repo = repo_api.create(gen_repo())
        self.addCleanup(repo_api.delete, repo.pulp_href)

        # Upload this very test file as an artifact (orphan from the start).
        artifacts_api = ArtifactsApi(self.core_client)
        artifact = artifacts_api.create(file=__file__)

        if self.storage == "pulpcore.app.models.storage.FileSystem":
            cmd = ("ls", os.path.join(self.media_root, artifact.file))
            self.cli_client.run(cmd, sudo=True)

        # A 10-second protection window must keep the artifact alive.
        orphans_response = self.orphans_cleanup_api.cleanup({"orphan_protection_time": 10})
        monitor_task(orphans_response.task)

        # assert artifact was not removed
        artifacts = artifacts_api.list().count
        self.assertEqual(artifacts, 1)

        # With protection time 0 the artifact must be removed.
        orphans_response = self.orphans_cleanup_api.cleanup({"orphan_protection_time": 0})
        monitor_task(orphans_response.task)

        with self.assertRaises(ApiException):
            artifacts_api.read(artifact.pulp_href)

        if self.storage == "pulpcore.app.models.storage.FileSystem":
            with self.assertRaises(CalledProcessError):
                self.cli_client.run(cmd)

    def test_clean_specific_orphans(self):
        """Test whether the `content_hrefs` param removes specific orphans but not others"""
        repo_api = RepositoriesFileApi(self.api_client)
        remote_api = RemotesFileApi(self.api_client)

        repo = repo_api.create(gen_repo())
        self.addCleanup(repo_api.delete, repo.pulp_href)

        body = gen_file_remote()
        remote = remote_api.create(body)
        self.addCleanup(remote_api.delete, remote.pulp_href)

        # Sync the repository.
        self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
        sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)
        repo = repo_api.read(repo.pulp_href)

        # Create two orphaned content units.
        content_a = get_content(repo.to_dict())[FILE_CONTENT_NAME][0]["pulp_href"]
        content_b = get_content(repo.to_dict())[FILE_CONTENT_NAME][1]["pulp_href"]
        content_to_remove = dict(remove_content_units=[content_a, content_b])
        repo_api.modify(repo.pulp_href, content_to_remove)

        file_contents_api = ContentFilesApi(self.api_client)
        # Delete first repo version. The previous removed content unit will be an orphan.
        delete_version(repo, get_versions(repo.to_dict())[1]["pulp_href"])
        content_units = file_contents_api.list().to_dict()["results"]
        content_units_href = [c["pulp_href"] for c in content_units]
        self.assertIn(content_a, content_units_href)
        self.assertIn(content_b, content_units_href)

        # Only content_a is listed, so only content_a may be removed.
        cleanup_dict = {"content_hrefs": [content_a], "orphan_protection_time": 0}
        orphans_response = self.orphans_cleanup_api.cleanup(cleanup_dict)
        monitor_task(orphans_response.task)

        content_units = file_contents_api.list().to_dict()["results"]
        content_units_href = [c["pulp_href"] for c in content_units]
        self.assertNotIn(content_a, content_units_href)
        self.assertIn(content_b, content_units_href)

    def test_clean_specific_orphans_but_no_orphans_specified(self):
        """Test whether the `content_hrefs` param raises a ValidationError with [] as the value"""
        content_hrefs_dict = {"content_hrefs": []}
        self.assertRaises(ApiException, self.orphans_cleanup_api.cleanup, content_hrefs_dict)

    def test_clean_specific_orphans_but_invalid_orphan_specified(self):
        """Test whether the `content_hrefs` param raises a ValidationError with and invalid href"""
        content_hrefs_dict = {"content_hrefs": ["/not/a/valid/content/href"]}
        self.assertRaises(ApiException, self.orphans_cleanup_api.cleanup, content_hrefs_dict)
diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_pagination.py b/pulp_file/tests/functional/api/from_pulpcore/test_pagination.py
new file mode 100644
index 000000000..8cf12c719
--- /dev/null
+++ 
b/pulp_file/tests/functional/api/from_pulpcore/test_pagination.py @@ -0,0 +1,143 @@
"""Tests related to pagination."""
import unittest
from random import randint, sample

from pulp_smash import api, config
from pulp_smash.pulp3.bindings import monitor_task
from pulp_smash.pulp3.utils import gen_repo, get_versions, modify_repo

from pulp_file.tests.functional.utils import populate_pulp
from .constants import (
    FILE_CONTENT_PATH,
    FILE_MANY_FIXTURE_COUNT,
    FILE_MANY_FIXTURE_MANIFEST_URL,
    FILE_REPO_PATH,
)


class RepoVersionPaginationTestCase(unittest.TestCase):
    """Test pagination of the core RepositoryVersion endpoints.

    This test case assumes that Pulp returns 100 elements in each page of
    results. This is configurable, but the current default set by all known
    Pulp installers.
    """

    @classmethod
    def setUpClass(cls):
        """Create class-wide variables."""
        cls.cfg = config.get_config()
        cls.client = api.Client(cls.cfg, api.page_handler)

    def test_file_content(self):
        """Test pagination for repository versions."""
        # Add content to Pulp, create a repo, and add content to repo. We
        # sample 21 contents, because with page_size set to 10, this produces 3
        # pages, where the three pages have unique combinations of values
        # for the "previous" and "next" links.
        populate_pulp(self.cfg, url=FILE_MANY_FIXTURE_MANIFEST_URL)
        sample_size = min(FILE_MANY_FIXTURE_COUNT, 21)
        contents = sample(self.client.get(FILE_CONTENT_PATH), sample_size)
        repo = self.client.post(FILE_REPO_PATH, gen_repo())
        self.addCleanup(self.client.delete, repo["pulp_href"])

        # Each modify_repo call creates one additional repository version.
        for content in contents:
            modify_repo(self.cfg, repo, add_units=[content])

        # Verify pagination works for getting repo versions.
        repo = self.client.get(repo["pulp_href"])
        repo_versions = get_versions(repo, {"page_size": 10})
        # sample_size content-adds plus the initial version 0.
        self.assertEqual(len(repo_versions), sample_size + 1, repo_versions)


class PaginationTestCase(unittest.TestCase):
    """Test pagination assuming that Pulp returns 100 elements in each page of results."""

    @classmethod
    def setUpClass(cls):
        """Create class-wide variables."""
        cls.cfg = config.get_config()
        cls.client = api.Client(cls.cfg, api.json_handler)

    def setUp(self):
        # 21 repos with limit=7 below yields exactly 3 pages.
        self.repos = []
        self.number_to_create = 21

        # Perform a sanity check.
        repos = self.client.using_handler(api.page_handler).get(FILE_REPO_PATH)
        assert len(repos) == 0, repos  # AssertEqual not available here yet

        # Create repos
        for _ in range(self.number_to_create):
            repo = self.client.post(FILE_REPO_PATH, gen_repo())
            self.repos.append(repo)

    def tearDown(self):
        # Fire off every delete first, then wait on all tasks, so the
        # deletions run concurrently on the server.
        responses = []
        for repo in self.repos:
            responses.append(self.client.delete(repo["pulp_href"]))
        for response in responses:
            monitor_task(response["task"])

    def test_pagination_workflow(self):
        # Raw limit/offset pagination first, then the page_handler wrapper.
        self._raw_pagination()
        self._page_handler_pagination()

    def _raw_pagination(self):
        """Assert content can be paginated page by page.

        Do the following:

        1. Without using page_handler request content
        2. Save collected_results and assert it is equal the per_page param
        3. Assert there is a next link but not a previous link
        4. Loop pages "number_to_create / per_page" (3)
        5. For each page request next link and assert length equals per_page
        6. For each page assert the presence of next and previous links
        7. Assert last page is reached
        8. 
Assert the final count equals number_to_create + """ + + per_page = 7 # will result in 3 pages + resp = self.client.get(FILE_REPO_PATH, params={"limit": per_page}) + collected_results = resp["results"] + # First call returns 7 results + self.assertEqual(len(collected_results), per_page, collected_results) + # no previous but there is a next + self.assertIsNone(resp["previous"], resp["previous"]) + self.assertIsNotNone(resp["next"], resp["next"]) + + # paginate pages 2 and 3 + for page in range(int(self.number_to_create / per_page)): # [0, 1, 2] + if page == 1: + # there is a previous and a next + self.assertIsNotNone(resp["previous"], resp["previous"]) + self.assertIsNotNone(resp["next"], resp["next"]) + # must have twice the size + self.assertEqual(len(collected_results), per_page * 2, collected_results) + if page == 2: + # last page there is no next but there is a previous + self.assertIsNone(resp["next"], resp["next"]) + self.assertIsNotNone(resp["previous"], resp["previous"]) + # must have 3 x the size + self.assertEqual(len(collected_results), per_page * 3, collected_results) + break # last page reached + resp = self.client.get(resp["next"]) + page_results = resp["results"] + self.assertEqual(len(page_results), per_page, page_results) + collected_results.extend(page_results) + + # Assert the final count + self.assertEqual(len(collected_results), self.number_to_create, collected_results) + + def _page_handler_pagination(self): + """ + Assert page handler returns all items independent of page_size. + + This test asserts that pulp-smash page_handler will collect results from all pages and + return it in the same call independent of the page_size provided. 
+ """ + repos = self.client.using_handler(api.page_handler).get( + FILE_REPO_PATH, params={"page_size": randint(2, 11)} + ) + self.assertEqual(len(repos), self.number_to_create, repos) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_proxy.py b/pulp_file/tests/functional/api/from_pulpcore/test_proxy.py new file mode 100644 index 000000000..4118eecda --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_proxy.py @@ -0,0 +1,133 @@ +import pytest +from pulp_smash.pulp3.bindings import monitor_task, PulpTaskError + +from pulpcore.client.pulp_file import ( + RepositorySyncURL, +) + + +def _run_basic_sync_and_assert(remote, file_repo, file_repo_api_client, file_content_api_client): + body = RepositorySyncURL(remote=remote.pulp_href) + monitor_task(file_repo_api_client.sync(file_repo.pulp_href, body).task) + + # Check content is present, but no artifacts are there + content_response = file_content_api_client.list( + repository_version=f"{file_repo.versions_href}1/" + ) + assert content_response.count == 3 + for content in content_response.results: + assert content.artifact is None + + +@pytest.mark.parallel +def test_sync_http_through_http_proxy( + file_fixture_gen_remote, + file_repo, + file_repo_api_client, + file_content_api_client, + http_proxy, +): + """ + Test syncing http through a http proxy. + """ + remote_on_demand = file_fixture_gen_remote( + fixture_name="basic", policy="on_demand", proxy_url=http_proxy.proxy_url + ) + + _run_basic_sync_and_assert( + remote_on_demand, file_repo, file_repo_api_client, file_content_api_client + ) + + +@pytest.mark.parallel +def test_sync_https_through_http_proxy( + file_fixture_gen_remote_ssl, + file_repo, + file_repo_api_client, + file_content_api_client, + http_proxy, +): + """ + Test syncing https through a http proxy. 
+ """ + remote_on_demand = file_fixture_gen_remote_ssl( + fixture_name="basic", policy="on_demand", proxy_url=http_proxy.proxy_url + ) + + _run_basic_sync_and_assert( + remote_on_demand, file_repo, file_repo_api_client, file_content_api_client + ) + + +@pytest.mark.parallel +def test_sync_https_through_http_proxy_with_auth( + file_fixture_gen_remote_ssl, + file_repo, + file_repo_api_client, + file_content_api_client, + http_proxy_with_auth, +): + """ + Test syncing https through a http proxy that requires auth. + """ + remote_on_demand = file_fixture_gen_remote_ssl( + fixture_name="basic", + policy="on_demand", + tls_validation="true", + proxy_url=http_proxy_with_auth.proxy_url, + proxy_username=http_proxy_with_auth.username, + proxy_password=http_proxy_with_auth.password, + ) + + _run_basic_sync_and_assert( + remote_on_demand, file_repo, file_repo_api_client, file_content_api_client + ) + + +@pytest.mark.parallel +def test_sync_https_through_http_proxy_with_auth_but_auth_not_configured( + file_fixture_gen_remote_ssl, + file_repo, + file_repo_api_client, + file_content_api_client, + http_proxy_with_auth, +): + """ + Test syncing https through a http proxy that requires auth, but auth is not configured. + """ + remote_on_demand = file_fixture_gen_remote_ssl( + fixture_name="basic", + policy="on_demand", + tls_validation="true", + proxy_url=http_proxy_with_auth.proxy_url, + ) + + try: + _run_basic_sync_and_assert( + remote_on_demand, file_repo, file_repo_api_client, file_content_api_client + ) + except PulpTaskError as exc: + assert "407, message='Proxy Authentication Required'" in exc.task.error["description"] + + +@pytest.mark.parallel +def test_sync_http_through_https_proxy( + file_fixture_gen_remote, + file_repo, + file_repo_api_client, + file_content_api_client, + https_proxy, +): + """ + Test syncing http through an https proxy. 
+ """ + remote_on_demand = file_fixture_gen_remote( + fixture_name="basic", + policy="on_demand", + proxy_url=https_proxy.proxy_url, + tls_validation="false", # We instead should have a `proxy_insecure` option + ) + + _run_basic_sync_and_assert( + remote_on_demand, file_repo, file_repo_api_client, file_content_api_client + ) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_publications.py b/pulp_file/tests/functional/api/from_pulpcore/test_publications.py new file mode 100644 index 000000000..aa363e5bb --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_publications.py @@ -0,0 +1,104 @@ +import unittest + +from pulp_smash import config +from pulp_smash.pulp3.bindings import delete_orphans, monitor_task + +from pulp_smash.pulp3.utils import ( + gen_repo, + get_content, +) + +from pulpcore.client.pulp_file import ( + FileFilePublication, + PublicationsFileApi, + RemotesFileApi, + RepositoriesFileApi, + RepositorySyncURL, +) +from pulpcore.client.pulp_file.exceptions import ApiException + +from pulp_file.tests.functional.utils import ( + gen_file_client, + gen_file_remote, +) +from .constants import ( + FILE_MANY_FIXTURE_MANIFEST_URL, + FILE_CONTENT_NAME, +) + + +class ContentInPublicationViewTestCase(unittest.TestCase): + @classmethod + def setUpClass(cls): + """Create class-wide variables.""" + cls.cfg = config.get_config() + cls.file_client = gen_file_client() + cls.repo_api = RepositoriesFileApi(cls.file_client) + cls.publication_api = PublicationsFileApi(cls.file_client) + cls.remote_api = RemotesFileApi(cls.file_client) + + @classmethod + def tearDownClass(cls): + delete_orphans() + + def test_all(self): + """Create two publications and check view filter.""" + repo = self.repo_api.create(gen_repo()) + self.addCleanup(self.repo_api.delete, repo.pulp_href) + + remote = self.remote_api.create(gen_file_remote()) + self.addCleanup(self.remote_api.delete, remote.pulp_href) + + # Sync and update repository data. 
+ repo_sync_data = RepositorySyncURL(remote=remote.pulp_href) + sync_response = self.repo_api.sync(repo.pulp_href, repo_sync_data) + monitor_task(sync_response.task) + repo = self.repo_api.read(repo.pulp_href) + + # Test content doesn't exists. + non_existant_content_href = ( + "/pulp/api/v3/content/file/files/c4ed74cf-a806-490d-a25f-94c3c3dd2dd7/" + ) + with self.assertRaises(ApiException) as ctx: + self.publication_api.list(content=non_existant_content_href) + self.assertEqual(ctx.exception.status, 400) + + # Test not published content. + content_href = get_content(repo.to_dict())[FILE_CONTENT_NAME][0]["pulp_href"] + self.assertEqual(self.publication_api.list(content=content_href).to_dict()["count"], 0) + + # Publication + publication_data = FileFilePublication(repository=repo.pulp_href) + publication_response = self.publication_api.create(publication_data) + task_response = monitor_task(publication_response.task) + publication = self.publication_api.read(task_response.created_resources[0]) + self.addCleanup(self.publication_api.delete, publication.pulp_href) + + # Second publication + repo_second = self.repo_api.create(gen_repo()) + self.addCleanup(self.repo_api.delete, repo_second.pulp_href) + + body = gen_file_remote(url=FILE_MANY_FIXTURE_MANIFEST_URL) + remote_second = self.remote_api.create(body) + self.addCleanup(self.remote_api.delete, remote_second.pulp_href) + + repo_second_sync_data = RepositorySyncURL(remote=remote_second.pulp_href) + sync_response = self.repo_api.sync(repo_second.pulp_href, repo_second_sync_data) + monitor_task(sync_response.task) + repo_second = self.repo_api.read(repo_second.pulp_href) + + publication_data = FileFilePublication(repository=repo_second.pulp_href) + publication_response = self.publication_api.create(publication_data) + task_response = monitor_task(publication_response.task) + publication_second = self.publication_api.read(task_response.created_resources[0]) + self.addCleanup(self.publication_api.delete, 
publication_second.pulp_href) + + # Test there are two publications + self.assertEqual(self.publication_api.list().count, 2) + + # Test content match publication + self.assertEqual(self.publication_api.list(content=content_href).count, 1) + self.assertEqual( + self.publication_api.list(content=content_href).results[0].repository_version, + repo.latest_version_href, + ) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_pulpexport.py b/pulp_file/tests/functional/api/from_pulpcore/test_pulpexport.py new file mode 100644 index 000000000..338006742 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_pulpexport.py @@ -0,0 +1,421 @@ +""" +Tests PulpExporter and PulpExport functionality + +NOTE: assumes ALLOWED_EXPORT_PATHS setting contains "/tmp" - all tests will fail if this is not +the case. +""" +import unittest +from pulp_smash import api, cli, config, utils +from pulp_smash.utils import uuid4 +from pulp_smash.pulp3.bindings import monitor_task +from pulp_smash.pulp3.utils import ( + gen_repo, +) + +from pulpcore.client.pulpcore import ( + ApiClient as CoreApiClient, + ExportersPulpApi, + ExportersPulpExportsApi, +) +from pulpcore.client.pulpcore.exceptions import ApiException + +from pulpcore.client.pulp_file import ( + ContentFilesApi, + RepositoriesFileApi, + RepositoriesFileVersionsApi, + RepositorySyncURL, + RemotesFileApi, +) +from pulp_file.tests.functional.utils import ( + create_repo_and_versions, + delete_exporter, + gen_file_client, + gen_file_remote, +) +from .constants import TASK_STATES + +NUM_REPOS = 3 +MAX_EXPORTS = 3 +NUM_EXPORTERS = 4 + + +class BaseExporterCase(unittest.TestCase): + """ + Base functionality for Exporter and Export test classes + + The export process isn't possible without repositories having been sync'd - arranging for + that to happen once per-class (instead of once-per-test) is the primary purpose of this parent + class. 
+ """ + + def _setup_repositories(self): + """Create and sync a number of repositories to be exported.""" + repos = [] + remotes = [] + for r in range(NUM_REPOS): + a_repo = self.repo_api.create(gen_repo()) + # give it a remote and sync it + body = gen_file_remote() + remote = self.remote_api.create(body) + repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) + sync_response = self.repo_api.sync(a_repo.pulp_href, repository_sync_data) + monitor_task(sync_response.task) + # remember it + a_repo = self.repo_api.read(file_file_repository_href=a_repo.pulp_href) + repos.append(a_repo) + remotes.append(remote) + return repos, remotes + + @classmethod + def setUpClass(cls): + cls.cfg = config.get_config() + cls.cli_client = cli.Client(cls.cfg) + allowed_exports = utils.get_pulp_setting(cls.cli_client, "ALLOWED_EXPORT_PATHS") + if not allowed_exports or "/tmp" not in allowed_exports: + raise unittest.SkipTest( + "Cannot run export-tests unless /tmp is in ALLOWED_EXPORT_PATHS ({}).".format( + allowed_exports + ), + ) + + cls.client = api.Client(cls.cfg, api.json_handler) + cls.core_client = CoreApiClient(configuration=cls.cfg.get_bindings_config()) + cls.file_client = gen_file_client() + + cls.content_api = ContentFilesApi(cls.file_client) + cls.repo_api = RepositoriesFileApi(cls.file_client) + cls.versions_api = RepositoriesFileVersionsApi(cls.file_client) + cls.remote_api = RemotesFileApi(cls.file_client) + cls.exporter_api = ExportersPulpApi(cls.core_client) + cls.exports_api = ExportersPulpExportsApi(cls.core_client) + + def setUp(self): + self.repos, self.remotes = self._setup_repositories() + + def tearDown(self): + responses = [] + for remote in self.remotes: + response = self.remote_api.delete(remote.pulp_href) + responses.append(response) + for repo in self.repos: + response = self.repo_api.delete(repo.pulp_href) + responses.append(response) + + for response in responses: + monitor_task(response.task) + + def _create_exporter(self, cleanup=True, 
use_repos=None): + """ + Utility routine to create an exporter for the available repositories. + + If all_repos, export everything in self.repos; otherwise only export first repo + """ + + body = { + "name": uuid4(), + "path": "/tmp/{}/".format(uuid4()), + "repositories": [r.pulp_href for r in self.repos], + } + if use_repos: + body["repositories"] = [r.pulp_href for r in use_repos] + + exporter = self.exporter_api.create(body) + if cleanup: + self.addCleanup(delete_exporter, exporter) + return exporter, body + + +class PulpExporterTestCase(BaseExporterCase): + """Test PulpExporter CURDL methods.""" + + def test_workflow(self): + self._create() + self._read() + self._partial_update() + self._list() + self._delete() + + def _create(self): + """Create a PulpExporter.""" + (exporter, body) = self._create_exporter() + self.assertIsNone(exporter.last_export) + self.assertEqual(body["name"], exporter.name) + self.assertEqual(body["path"], exporter.path) + self.assertEqual(len(self.repos), len(exporter.repositories)) + + def _read(self): + """Read a created PulpExporter.""" + (exporter_created, body) = self._create_exporter() + exporter_read = self.exporter_api.read(exporter_created.pulp_href) + self.assertEqual(exporter_created.name, exporter_read.name) + self.assertEqual(exporter_created.path, exporter_read.path) + self.assertEqual(len(exporter_created.repositories), len(exporter_read.repositories)) + + def _partial_update(self): + """Update a PulpExporter's path.""" + (exporter_created, body) = self._create_exporter() + body = {"path": "/tmp/{}".format(uuid4())} + result = self.exporter_api.partial_update(exporter_created.pulp_href, body) + monitor_task(result.task) + exporter_read = self.exporter_api.read(exporter_created.pulp_href) + self.assertNotEqual(exporter_created.path, exporter_read.path) + self.assertEqual(body["path"], exporter_read.path) + + def _list(self): + """Show a set of created PulpExporters.""" + starting_exporters = self.exporter_api.list().results 
+ for x in range(NUM_EXPORTERS): + self._create_exporter() + ending_exporters = self.exporter_api.list().results + self.assertEqual(NUM_EXPORTERS, len(ending_exporters) - len(starting_exporters)) + + def _delete(self): + """Delete a pulpExporter.""" + (exporter_created, body) = self._create_exporter(cleanup=False) + delete_exporter(exporter_created) + try: + self.exporter_api.read(exporter_created.pulp_href) + except ApiException as ae: + self.assertEqual(404, ae.status) + return + self.fail("Found a deleted exporter!") + + +class PulpExportTestCase(BaseExporterCase): + """Test PulpExport CRDL methods (Update is not allowed).""" + + def _gen_export(self, exporter, body={}): + """Create and read back an export for the specified PulpExporter.""" + export_response = self.exports_api.create(exporter.pulp_href, body) + monitor_task(export_response.task) + task = self.client.get(export_response.task) + resources = task["created_resources"] + self.assertEqual(1, len(resources)) + reports = task["progress_reports"] + found_artifacts = False + found_content = False + for r in reports: + self.assertEqual(TASK_STATES.COMPLETED, r["state"]) + found_artifacts |= r["code"] == "export.artifacts" + found_content |= r["code"] == "export.repo.version.content" + self.assertTrue(found_artifacts, "No artifacts exported!") + self.assertTrue(found_content, "No content exported!") + export_href = resources[0] + export = self.exports_api.read(export_href) + self.assertIsNotNone(export) + return export + + def test_workflow(self): + self._export() + self._list() + self._delete() + self._export_by_version_validation() + self._export_by_version_results() + self._incremental() + self._chunking() + self._start_end_incrementals() + + def _export(self): + """Issue and evaluate a PulpExport (tests both Create and Read).""" + (exporter, body) = self._create_exporter(cleanup=False) + try: + export = self._gen_export(exporter) + self.assertIsNotNone(export) + 
self.assertEqual(len(exporter.repositories), len(export.exported_resources)) + self.assertIsNotNone(export.output_file_info) + self.assertIsNotNone(export.toc_info) + for an_export_filename in export.output_file_info.keys(): + self.assertFalse("//" in an_export_filename) + + finally: + delete_exporter(exporter) + + def _list(self): + """Find all the PulpExports for a PulpExporter.""" + (exporter, body) = self._create_exporter(cleanup=False) + try: + export = None + for i in range(MAX_EXPORTS): + export = self._gen_export(exporter) + exporter = self.exporter_api.read(exporter.pulp_href) + self.assertEqual(exporter.last_export, export.pulp_href) + exports = self.exports_api.list(exporter.pulp_href).results + self.assertEqual(MAX_EXPORTS, len(exports)) + finally: + delete_exporter(exporter) + + def _delete_export(self, export): + """ + Delete a PulpExport and test that it is gone. + + :param export: PulpExport to be deleted + :return: true if specified export is gone, false if we can still find it + """ + self.exports_api.delete(export.pulp_href) + try: + self.exports_api.read(export.pulp_href) + except ApiException as ae: + self.assertEqual(404, ae.status) + return True + return False + + def _create_repo_and_versions(self): + a_repo, versions = create_repo_and_versions( + self.repos[0], self.repo_api, self.versions_api, self.content_api + ) + self.addCleanup(self.client.delete, a_repo.pulp_href) + self.assertIsNotNone(versions) + self.assertEqual(4, versions.count) + return a_repo, versions + + def _delete(self): + """ + Test deleting exports for a PulpExporter. + + NOTE: Attempting to delete the current last_export is forbidden. 
+ """ + (exporter, body) = self._create_exporter(cleanup=False) + try: + # Do three exports + first_export = self._gen_export(exporter) + self._gen_export(exporter) + last_export = self._gen_export(exporter) + + # delete one make sure it's gone + if not self._delete_export(first_export): + self.fail("Failed to delete an export") + + # make sure the exporter knows it's gone + exporter = self.exporter_api.read(exporter.pulp_href) + exports = self.exports_api.list(exporter.pulp_href).results + self.assertEqual(2, len(exports)) + + # Now try to delete the last_export export and succeed + # as of https://pulp.plan.io/issues/6555 + self._delete_export(last_export) + # Make sure the exporter is still around... + exporter = self.exporter_api.read(exporter.pulp_href) + finally: + delete_exporter(exporter) + + def _export_by_version_validation(self): + repositories = self.repos + latest_versions = [r.latest_version_href for r in repositories] + + # exporter for one repo. specify one version + (exporter, body) = self._create_exporter(use_repos=[repositories[0]]) + body = {"versions": [latest_versions[0]]} + self._gen_export(exporter, body) + + # exporter for one repo. 
specify one *wrong* version + with self.assertRaises(ApiException) as ae: + (exporter, body) = self._create_exporter(use_repos=[repositories[0]]) + body = {"versions": [latest_versions[1]]} + self._gen_export(exporter, body) + self.assertTrue("must belong to" in ae.exception.body) + + # exporter for two repos, specify one version + with self.assertRaises(ApiException) as ae: + (exporter, body) = self._create_exporter(use_repos=[repositories[0], repositories[1]]) + body = {"versions": [latest_versions[0]]} + self._gen_export(exporter, body) + self.assertTrue("does not match the number" in ae.exception.body) + + # exporter for two repos, specify one correct and one *wrong* version + with self.assertRaises(ApiException) as ae: + (exporter, body) = self._create_exporter(use_repos=[repositories[0], repositories[1]]) + body = {"versions": [latest_versions[0], latest_versions[2]]} + self._gen_export(exporter, body) + self.assertTrue("must belong to" in ae.exception.body) + + def _export_by_version_results(self): + repositories = self.repos + latest_versions = [r.latest_version_href for r in repositories] + zeroth_versions = [] + for v in latest_versions: + v_parts = v.split("/") + v_parts[-2] = "0" + zeroth_versions.append("/".join(v_parts)) + + (exporter, body) = self._create_exporter(use_repos=[repositories[0]], cleanup=False) + try: + # export no-version, check that /1/ was exported + export = self._gen_export(exporter) + self.assertTrue(export.exported_resources[0].endswith("/1/")) + # exporter-by-version, check that /0/ was exported + body = {"versions": [zeroth_versions[0]]} + export = self._gen_export(exporter, body) + self.assertTrue(export.exported_resources[0].endswith("/0/")) + finally: + delete_exporter(exporter) + + def _incremental(self): + # create a repo with 4 repo-versions + a_repo, versions = self._create_repo_and_versions() + # create exporter for that repository + (exporter, body) = self._create_exporter(use_repos=[a_repo], cleanup=False) + try: + # 
negative - ask for an incremental without having a last_export + with self.assertRaises(ApiException): + body = {"full": False} + self._gen_export(exporter, body) + + # export repo-2-version[1]-full versions.results[1] + body = {"versions": [versions.results[1].pulp_href]} + self._gen_export(exporter, body) + # export repo-2-version[2] + body = {"versions": [versions.results[2].pulp_href], "full": False} + self._gen_export(exporter, body) + # export repo-2-latest + body = {"full": False} + self._gen_export(exporter, body) + finally: + delete_exporter(exporter) + + def _chunking(self): + a_repo = self.repo_api.create(gen_repo()) + self.addCleanup(self.client.delete, a_repo.pulp_href) + (exporter, body) = self._create_exporter(use_repos=[a_repo], cleanup=False) + try: + body = {"chunk_size": "250B"} + export = self._gen_export(exporter, body) + info = export.output_file_info + self.assertIsNotNone(info) + self.assertTrue(len(info) > 1) + finally: + delete_exporter(exporter) + + def _start_end_incrementals(self): + # create a repo with 4 repo-versions + a_repo, versions = self._create_repo_and_versions() + (exporter, body) = self._create_exporter(use_repos=[a_repo], cleanup=False) + try: + # export from version-1 to latest last=v3 + body = {"start_versions": [versions.results[1].pulp_href], "full": False} + self._gen_export(exporter, body) + + # export from version-1 to version-2, last=v2 + body = { + "start_versions": [versions.results[1].pulp_href], + "versions": [versions.results[2].pulp_href], + "full": False, + } + self._gen_export(exporter, body) + + # negative attempt, start_versions= is not a version + with self.assertRaises(ApiException): + body = {"start_versions": [a_repo.pulp_href], "full": False} + self._gen_export(exporter, body) + + # negative attempt, start_versions= and Full=True + with self.assertRaises(ApiException): + body = {"start_versions": [versions.results[2].pulp_href], "full": True} + self._gen_export(exporter, body) + + # negative attempt, 
start_versions= is a version from Some Other Repo + with self.assertRaises(ApiException): + second_repo, second_versions = self._create_repo_and_versions() + body = {"start_versions": [second_versions.results[0].pulp_href], "full": False} + self._gen_export(exporter, body) + finally: + delete_exporter(exporter) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_pulpimport.py b/pulp_file/tests/functional/api/from_pulpcore/test_pulpimport.py new file mode 100644 index 000000000..96105532e --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_pulpimport.py @@ -0,0 +1,483 @@ +""" +Tests PulpImporter and PulpImport functionality + +NOTE: assumes ALLOWED_EXPORT_PATHS and ALLOWED_IMPORT_PATHS settings contain "/tmp" - all tests +will fail if this is not the case. +""" +import json +import unittest + +from pulp_smash import api, cli, config +from pulp_smash.utils import uuid4, get_pulp_setting +from pulp_smash.pulp3.bindings import delete_orphans, monitor_task, monitor_task_group +from pulp_smash.pulp3.utils import ( + gen_repo, +) + +from pulpcore.client.pulpcore import ( + ApiClient as CoreApiClient, + ExportersPulpExportsApi, + ExportersPulpApi, + ImportersPulpImportCheckApi, + ImportersPulpImportsApi, + ImportersPulpApi, +) + +from pulpcore.client.pulpcore.exceptions import ApiException + +from pulpcore.client.pulp_file import ( + ContentFilesApi, + RepositoriesFileApi, + RepositoriesFileVersionsApi, + RepositorySyncURL, + RemotesFileApi, +) + +from pulp_file.tests.functional.utils import ( + create_repo_and_versions, + delete_exporter, + gen_file_client, + gen_file_remote, +) + + +NUM_REPOS = 2 + + +class PulpImportTestCase(unittest.TestCase): + """ + Base functionality for PulpImporter and PulpImport test classes + """ + + @classmethod + def _setup_repositories(cls): + """Create and sync a number of repositories to be exported.""" + # create and remember a set of repo + import_repos = [] + export_repos = [] + remotes = [] + for r in 
range(NUM_REPOS): + import_repo = cls.repo_api.create(gen_repo()) + export_repo = cls.repo_api.create(gen_repo()) + body = gen_file_remote() + remote = cls.remote_api.create(body) + repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) + sync_response = cls.repo_api.sync(export_repo.pulp_href, repository_sync_data) + monitor_task(sync_response.task) + export_repo = cls.repo_api.read(export_repo.pulp_href) + # remember it + export_repos.append(export_repo) + import_repos.append(import_repo) + remotes.append(remote) + return import_repos, export_repos, remotes + + @classmethod + def _create_exporter(cls, cleanup=True): + body = { + "name": uuid4(), + "repositories": [r.pulp_href for r in cls.export_repos], + "path": "/tmp/{}".format(uuid4()), + } + exporter = cls.exporter_api.create(body) + return exporter + + @classmethod + def _create_export(cls): + export_response = cls.exports_api.create(cls.exporter.pulp_href, {}) + monitor_task(export_response.task) + task = cls.client.get(export_response.task) + resources = task["created_resources"] + export_href = resources[0] + export = cls.exports_api.read(export_href) + return export + + @classmethod + def _create_chunked_export(cls): + export_response = cls.exports_api.create(cls.exporter.pulp_href, {"chunk_size": "5KB"}) + monitor_task(export_response.task) + task = cls.client.get(export_response.task) + resources = task["created_resources"] + export_href = resources[0] + export = cls.exports_api.read(export_href) + return export + + @classmethod + def _setup_import_check_directories(cls): + """Creates a directory/file structure for testing import-check""" + cli_client = cli.Client(cls.cfg) + cmd = ( + "mkdir", + "-p", + "/tmp/importcheck/noreaddir", + "/tmp/importcheck/nowritedir", + "/tmp/importcheck/nowritedir/notafile", + ) + cli_client.run(cmd, sudo=False) + + cmd = ("touch", "/tmp/importcheck/noreadfile") + cli_client.run(cmd, sudo=False) + + cmd = ("touch", "/tmp/importcheck/noreaddir/goodfile") + 
cli_client.run(cmd, sudo=False) + + cmd = ("touch", "/tmp/importcheck/nowritedir/goodfile") + cli_client.run(cmd, sudo=False) + + cmd = ("touch", "/tmp/importcheck/nowritedir/noreadfile") + cli_client.run(cmd, sudo=False) + + cmd = ("chmod", "333", "/tmp/importcheck/nowritedir/noreadfile") + cli_client.run(cmd, sudo=False) + + cmd = ("chmod", "333", "/tmp/importcheck/noreadfile") + cli_client.run(cmd, sudo=False) + + cmd = ("chmod", "333", "/tmp/importcheck/noreaddir") + cli_client.run(cmd, sudo=False) + + cmd = ("chmod", "555", "/tmp/importcheck/nowritedir") + cli_client.run(cmd, sudo=False) + + @classmethod + def setUpClass(cls): + """Create class-wide variables.""" + cls.cfg = config.get_config() + cls.cli_client = cli.Client(cls.cfg) + allowed_imports = get_pulp_setting(cls.cli_client, "ALLOWED_IMPORT_PATHS") + if not allowed_imports or "/tmp" not in allowed_imports: + raise unittest.SkipTest( + "Cannot run import-tests unless /tmp is in ALLOWED_IMPORT_PATHS ({}).".format( + allowed_imports + ), + ) + + cls.client = api.Client(cls.cfg, api.json_handler) + cls.core_client = CoreApiClient(configuration=cls.cfg.get_bindings_config()) + cls.file_client = gen_file_client() + + cls.repo_api = RepositoriesFileApi(cls.file_client) + cls.remote_api = RemotesFileApi(cls.file_client) + cls.versions_api = RepositoriesFileVersionsApi(cls.file_client) + cls.content_api = ContentFilesApi(cls.file_client) + cls.exporter_api = ExportersPulpApi(cls.core_client) + cls.exports_api = ExportersPulpExportsApi(cls.core_client) + cls.importer_api = ImportersPulpApi(cls.core_client) + cls.imports_api = ImportersPulpImportsApi(cls.core_client) + + cls.import_check_api = ImportersPulpImportCheckApi(cls.core_client) + + (cls.import_repos, cls.export_repos, cls.remotes) = cls._setup_repositories() + cls.exporter = cls._create_exporter() + cls.export = cls._create_export() + cls.chunked_export = cls._create_chunked_export() + cls._setup_import_check_directories() + + @classmethod + def 
_delete_import_check_structures(cls): + """Deletes the directory tree used for testing import-check""" + cli_client = cli.Client(cls.cfg) + cmd = ("chmod", "-R", "+rwx", "/tmp/importcheck/") + cli_client.run(cmd, sudo=False) + cmd = ("rm", "-rf", "/tmp/importcheck") + cli_client.run(cmd, sudo=False) + + @classmethod + def _create_repo_and_versions(cls): + a_repo, versions = create_repo_and_versions( + cls.export_repos[0], cls.repo_api, cls.versions_api, cls.content_api + ) + return a_repo, versions + + @classmethod + def tearDownClass(cls): + """Clean up.""" + for remote in cls.remotes: + cls.remote_api.delete(remote.pulp_href) + for repo in cls.export_repos: + cls.repo_api.delete(repo.pulp_href) + for repo in cls.import_repos: + cls.repo_api.delete(repo.pulp_href) + delete_exporter(cls.exporter) + cls._delete_import_check_structures() + delete_orphans() + + def _create_importer(self, name=None, cleanup=True, exported_repos=None): + """Create an importer.""" + mapping = {} + if not name: + name = uuid4() + if not exported_repos: + exported_repos = self.export_repos + + for idx, repo in enumerate(exported_repos): + mapping[repo.name] = self.import_repos[idx].name + + body = { + "name": name, + "repo_mapping": mapping, + } + + importer = self.importer_api.create(body) + + if cleanup: + self.addCleanup(self.importer_api.delete, importer.pulp_href) + + return importer + + def _find_toc(self): + filenames = [ + f for f in list(self.chunked_export.output_file_info.keys()) if f.endswith("json") + ] + return filenames[0] + + def _find_path(self): + filenames = [f for f in list(self.export.output_file_info.keys()) if f.endswith("tar.gz")] + return filenames[0] + + def _perform_import(self, importer, chunked=False, an_export=None): + """Perform an import with importer.""" + if not an_export: + an_export = self.chunked_export if chunked else self.export + + if chunked: + filenames = [f for f in list(an_export.output_file_info.keys()) if f.endswith("json")] + import_response = 
self.imports_api.create(importer.pulp_href, {"toc": filenames[0]}) + else: + filenames = [f for f in list(an_export.output_file_info.keys()) if f.endswith("tar.gz")] + import_response = self.imports_api.create(importer.pulp_href, {"path": filenames[0]}) + task_group = monitor_task_group(import_response.task_group) + + return task_group + + def test_workflow(self): + self._importer_create() + self._importer_delete() + self._import() + self._double_import() + self._chunked_import() + self._import_check_valid_path() + self._import_check_valid_toc() + self._import_check_repo_mapping() + self._import_check_not_allowed() + self._import_check_no_file() + self._import_check_all_valid() + self._import_check_multiple_errors() + self._import_not_latest_version() + + def _importer_create(self): + """Test creating an importer.""" + name = uuid4() + importer = self._create_importer(name) + + self.assertEqual(importer.name, name) + importer = self.importer_api.read(importer.pulp_href) + self.assertEqual(importer.name, name) + + def _importer_delete(self): + """Test deleting an importer.""" + importer = self._create_importer(cleanup=False) + + self.importer_api.delete(importer.pulp_href) + + with self.assertRaises(ApiException) as ae: + self.importer_api.read(importer.pulp_href) + + self.assertEqual(404, ae.exception.status) + + def _import(self): + """Test an import.""" + importer = self._create_importer() + task_group = self._perform_import(importer) + self.assertEqual(len(self.import_repos) + 1, task_group.completed) + + for report in task_group.group_progress_reports: + if report.code == "import.repo.versions": + self.assertEqual(report.done, len(self.import_repos)) + + for repo in self.import_repos: + repo = self.repo_api.read(repo.pulp_href) + self.assertEqual(f"{repo.pulp_href}versions/1/", repo.latest_version_href) + + def _double_import(self): + """Test two imports of our export.""" + importer = self._create_importer() + self._perform_import(importer) + 
def _chunked_import(self):
    """Test a chunked import, driven by the export's table-of-contents file."""
    importer = self._create_importer()
    task_group = self._perform_import(importer, chunked=True)
    # One task per imported repo, plus the top-level import task itself.
    self.assertEqual(len(self.import_repos) + 1, task_group.completed)
    for repo in self.import_repos:
        repo = self.repo_api.read(repo.pulp_href)
        self.assertEqual(f"{repo.pulp_href}versions/1/", repo.latest_version_href)

def _import_check_valid_path(self):
    """A valid tar.gz path: reported valid with no messages; unspecified fields stay None."""
    body = {"path": self._find_path()}
    result = self.import_check_api.pulp_import_check_post(body)
    self.assertEqual(result.path.context, self._find_path())
    self.assertTrue(result.path.is_valid)
    self.assertEqual(len(result.path.messages), 0)
    self.assertIsNone(result.toc)
    self.assertIsNone(result.repo_mapping)

def _import_check_valid_toc(self):
    """Same as the path check above, but validating the toc file instead."""
    body = {"toc": self._find_toc()}
    result = self.import_check_api.pulp_import_check_post(body)
    self.assertEqual(result.toc.context, self._find_toc())
    self.assertTrue(result.toc.is_valid)
    self.assertEqual(len(result.toc.messages), 0)
    self.assertIsNone(result.path)
    self.assertIsNone(result.repo_mapping)
def _import_check_not_allowed(self):
    """A path outside the server's allowed import paths is rejected with exactly one message."""
    body = {"path": "/notinallowedimports"}
    result = self.import_check_api.pulp_import_check_post(body)
    self.assertEqual(result.path.context, "/notinallowedimports")
    self.assertFalse(result.path.is_valid)
    self.assertEqual(len(result.path.messages), 1, "Only not-allowed should be returned")
    self.assertEqual(result.path.messages[0], "/ is not an allowed import path")

    # Same rejection applies when the disallowed location is given as the toc.
    body = {"toc": "/notinallowedimports"}
    result = self.import_check_api.pulp_import_check_post(body)
    self.assertEqual(result.toc.context, "/notinallowedimports")
    self.assertFalse(result.toc.is_valid)
    self.assertEqual(len(result.toc.messages), 1, "Only not-allowed should be returned")
    self.assertEqual(result.toc.messages[0], "/ is not an allowed import path")

def _import_check_no_file(self):
    """An allowed path pointing at a nonexistent file fails with a does-not-exist message."""
    body = {"path": "/tmp/idonotexist"}
    result = self.import_check_api.pulp_import_check_post(body)
    self.assertEqual(result.path.context, "/tmp/idonotexist")
    self.assertFalse(result.path.is_valid)
    self.assertTrue(
        any("file /tmp/idonotexist does not exist" in s for s in result.path.messages)
    )

    body = {"toc": "/tmp/idonotexist"}
    result = self.import_check_api.pulp_import_check_post(body)
    self.assertEqual(result.toc.context, "/tmp/idonotexist")
    self.assertFalse(result.toc.is_valid)
    self.assertTrue(
        any("file /tmp/idonotexist does not exist" in s for s in result.toc.messages)
    )
def _import_check_multiple_errors(self):
    """All three evaluations can fail independently; check each failure mode in one call."""
    body = {
        "path": "/notinallowedimports",
        "toc": "/tmp/importcheck/nowritedir/notafile",
        "repo_mapping": '{"foo": "bar"',
    }
    result = self.import_check_api.pulp_import_check_post(body)

    self.assertFalse(result.path.is_valid)
    self.assertEqual(len(result.path.messages), 1, "Only not-allowed should be returned")
    self.assertEqual(result.path.messages[0], "/ is not an allowed import path")

    self.assertFalse(result.toc.is_valid)
    self.assertTrue(
        any(
            "/tmp/importcheck/nowritedir/notafile is not a file" in s
            for s in result.toc.messages
        )
    )
    # FAILS IN CI, passes locally
    # self.assertTrue(
    #     any(
    #         "directory /tmp/importcheck/nowritedir must allow pulp write-access" in s
    #         for s in result.toc.messages
    #     )
    # )

    self.assertFalse(result.repo_mapping.is_valid)
    self.assertEqual(result.repo_mapping.messages[0], "invalid JSON")

def _gen_export(self, exporter, body=None):
    """Create and read back an export for the specified PulpExporter.

    BUG FIX: `body` now defaults to None instead of the mutable default `{}`
    (a shared dict across calls); an empty dict is substituted at call time,
    which is backward-compatible for every caller.
    """
    export_response = self.exports_api.create(exporter.pulp_href, body or {})
    monitor_task(export_response.task)
    task = self.client.get(export_response.task)
    resources = task["created_resources"]
    export_href = resources[0]
    export = self.exports_api.read(export_href)
    return export
def _import_not_latest_version(self):
    """Test an import of an export that contains a non-latest repository-version."""
    # BUG FIX: create the repo *before* entering the try-block. In the original,
    # a failure inside _create_repo_and_versions() left `repo` unbound, so the
    # finally-clause raised NameError and masked the real error. (A stray
    # mid-body '"""Test an import."""' string statement was also removed.)
    repo, versions = self._create_repo_and_versions()
    try:
        export = self._export_first_version(repo, versions)
        importer = self._create_importer(exported_repos=[repo])
        task_group = self._perform_import(importer, chunked=False, an_export=export)

        for report in task_group.group_progress_reports:
            if report.code == "import.repo.versions":
                self.assertEqual(report.done, 1)

        # The imported repo must have advanced past its initial version-0.
        imported_repo = self.repo_api.read(self.import_repos[0].pulp_href)
        self.assertNotEqual(
            f"{imported_repo.pulp_href}versions/0/", imported_repo.latest_version_href
        )
    finally:
        self.repo_api.delete(repo.pulp_href)
def _task_summary(self):
    """
    Summary of number of tasks in all known task-states.

    :return: tuple of (total-tasks, final-tasks, Dict(state: count))
    """
    summary = {}
    total = 0
    final_total = 0
    for state in TASK_STATES.__dict__.values():
        response = self.task_api.list(state=state)
        summary[state] = response.count
        total += summary[state]
        # Only tasks in a final state are eligible for purging.
        final_total += summary[state] if state in TASK_FINAL_STATES else 0
    return total, final_total, summary

def _purge_report_total(self, task):
    """Return the total from the purge task's overall 'purge.tasks.total' report."""
    for report in task.progress_reports:
        if report.code == "purge.tasks.total":
            return report.total
    self.fail("NO PURGE_TASKS_TOTAL?!?")

def _purge_report_check(self, task):
    """Confirm the overall purge total equals the sum of the per-object-type sub-reports."""
    subobj_total = 0
    total = 0
    for report in task.progress_reports:
        # BUG FIX: the overall total comes from the "purge.tasks.total" report;
        # every other report is a per-type sub-report. The original condition
        # was inverted (`!=` assigned `total`) and mixed dict-style
        # report["code"] access with attribute access.
        if report.code == "purge.tasks.total":
            total = report.total
        else:
            subobj_total += report.total
    self.assertEqual(total, subobj_total)

def _check_delete_report(self, task, expected):
    # Make sure we reported the deletion
    for report in task.progress_reports:
        if report.code == "purge.tasks.key.core.Task":
            self.assertEqual(report.total, expected)
            break
    else:
        self.fail("NO core.Task DELETIONS?!?")

@classmethod
def setUpClass(cls):
    """Create repos, remotes, and api-clients for all tests."""
    cls.cfg = config.get_config()
    cls.client = ApiClient(configuration=cls.cfg.get_bindings_config())
    cls.task_api = TasksApi(cls.client)

    cls.file_client = gen_file_client()
    cls.remote_api = RemotesFileApi(cls.file_client)
    cls.repo_api = RepositoriesFileApi(cls.file_client)

def tearDown(self):
    """Cleanup repos and remotes. Do the best we can, ignore any errors."""
    self.remote_api.delete(self.bad_remote.pulp_href)
    self.remote_api.delete(self.good_remote.pulp_href)
    self.repo_api.delete(self.bad_repo.pulp_href)
    self.repo_api.delete(self.good_repo.pulp_href)
def test_purge_before_time(self):
    """Purge that should find no tasks to delete."""
    # Nothing can have finished before the epoch, so this purge is a no-op.
    dta = Purge(finished_before="1970-01-01T00:00")
    response = self.task_api.purge(dta)
    task = monitor_task(response.task)
    new_total, new_final, new_summary = self._task_summary()
    # Should have all tasks remaining (2 completed, 1 failed):
    # the good sync, the failed sync, and this purge task itself.
    self.assertEqual(self.pre_total + 3, new_total)
    # Should show we report having purged no tasks
    self.assertEqual(self._purge_report_total(task), 0)
def test_purge_all(self):
    """Purge all tasks in any 'final' state."""
    states = list(TASK_FINAL_STATES)
    dta = Purge(finished_before=TOMORROW_STR, states=states)
    response = self.task_api.purge(dta)
    task = monitor_task(response.task)
    new_total, new_final, new_summary = self._task_summary()
    self.assertEqual(1, new_final)  # The purge-task is the only final-task left

    # Make sure good sync-task is gone
    with self.assertRaises(ApiException):
        self.task_api.read(self.completed_sync_task.pulp_href)

    # Make sure failed sync-task is gone
    with self.assertRaises(ApiException):
        self.task_api.read(self.failed_sync_task.pulp_href)

    # Make sure we reported the deletions
    self._check_delete_report(task, self.pre_final + 2)

def test_purge_leave_one(self):
    """Arrange to leave one task unscathed."""
    # Leave only the failed sync: purge everything that finished *before* it did.
    dta = Purge(finished_before=self.failed_sync_task.finished_at)
    response = self.task_api.purge(dta)
    task = monitor_task(response.task)

    # Make sure good sync-task is gone
    with self.assertRaises(ApiException):
        self.task_api.read(self.completed_sync_task.pulp_href)

    # Make sure the failed sync still exists
    self.task_api.read(self.failed_sync_task.pulp_href)

    # Make sure we reported the task-deletion
    self._check_delete_report(task, self.pre_summary["completed"] + 1)
def test_bad_date(self):
    """What happens if you use a bad date format?"""
    dta = Purge(finished_before="THISISNOTADATE")
    # Server-side validation should reject the malformed timestamp.
    with self.assertRaises(ApiException):
        self.task_api.purge(dta)

def test_bad_state(self):
    """What happens if you specify junk for a state?"""
    dta = Purge(finished_before=TOMORROW_STR, states=["BAD STATE"])
    with self.assertRaises(ApiException):
        self.task_api.purge(dta)

def test_not_final_state(self):
    """What happens if you use a valid state that isn't a 'final' one?"""
    # "running" is a real task state but not purgeable; only final states are.
    dta = Purge(finished_before=TOMORROW_STR, states=["running"])
    with self.assertRaises(ApiException):
        self.task_api.purge(dta)

@classmethod
def setUpClass(cls):
    """Create the shared admin bindings/client used by the user-permission tests."""
    cls.cfg = config.get_config()
    cls.client = ApiClient(configuration=cls.cfg.get_bindings_config())
    cls.file_client = gen_file_client()
def tearDown(self):
    """Delete the admin- and user-owned repos/remotes from setUp, then the user."""
    self.admin_info["remote_api"].delete(self.admin_info["a_remote"].pulp_href)
    self.admin_info["repo_api"].delete(self.admin_info["a_repo"].pulp_href)
    self.user_info["remote_api"].delete(self.user_info["a_remote"].pulp_href)
    self.user_info["repo_api"].delete(self.user_info["a_repo"].pulp_href)
    del_user(self.new_user)

def testUserCannotPurge(self):
    """
    Test that purge does NOT purge tasks NOT OWNED by caller.
    """
    # Sync as admin
    sync_response = self.admin_info["repo_api"].sync(
        self.admin_info["a_repo"].pulp_href, self.admin_info["sync_data"]
    )
    sync_task = monitor_task(sync_response.task)
    self.assertEqual(sync_task.state, "completed")

    # Purge as user
    states = list(TASK_FINAL_STATES)
    dta = Purge(finished_before=TOMORROW_STR, states=states)
    response = self.user_info["task_api"].purge(dta)
    monitor_task(response.task)

    # Make sure sync-task (executed by admin) still exists.
    # BUG FIX: the original reassigned `task` to the just-finished *purge*
    # task and then read that href, so the assertion could never fail;
    # we now keep and check the admin sync task's own href.
    self.admin_info["task_api"].read(sync_task.pulp_href)

def testUserCanPurge(self) -> None:
    """
    Test that purge DOES purge tasks owned by caller.
    """
    # Sync as user
    sync_response = self.user_info["repo_api"].sync(
        self.user_info["a_repo"].pulp_href, self.user_info["sync_data"]
    )
    sync_task = monitor_task(sync_response.task)
    self.assertEqual(sync_task.state, "completed")

    # Purge as user
    states = list(TASK_FINAL_STATES)
    dta = Purge(finished_before=TOMORROW_STR, states=states)
    response = self.user_info["task_api"].purge(dta)
    monitor_task(response.task)

    # Make sure task DOES NOT exist
    with self.assertRaises(ApiException):
        self.admin_info["task_api"].read(sync_task.pulp_href)

def testAdminCanPurge(self):
    """
    Test that admin can ALWAYS purge.
    """
    # Sync as user
    sync_response = self.user_info["repo_api"].sync(
        self.user_info["a_repo"].pulp_href, self.user_info["sync_data"]
    )
    sync_task = monitor_task(sync_response.task)
    self.assertEqual(sync_task.state, "completed")

    # Purge as ADMIN
    states = list(TASK_FINAL_STATES)
    dta = Purge(finished_before=TOMORROW_STR, states=states)
    response = self.admin_info["task_api"].purge(dta)
    monitor_task(response.task)

    # Make sure task DOES NOT exist
    with self.assertRaises(ApiException):
        self.admin_info["task_api"].read(sync_task.pulp_href)
@classmethod
def setUpClass(cls):
    """Create class-wide variables."""
    cls.cfg = config.get_config()
    cls.client = gen_file_client()
    core_client = gen_pulpcore_client()
    cls.orphans_api = OrphansCleanupApi(core_client)
    cls.reclaim_api = RepositoriesReclaimSpaceApi(core_client)
    cls.artifacts_api = ArtifactsApi(core_client)
    cls.all_repo_api = RepositoriesApi(core_client)
    cls.publication_api = PublicationsFileApi(cls.client)
    cls.distributions_api = DistributionsFileApi(cls.client)
    cls.repo_api = RepositoriesFileApi(cls.client)
    cls.remote_api = RemotesFileApi(cls.client)

    # Start from a clean slate so the artifact-count assertions below are meaningful.
    orphans_response = cls.orphans_api.cleanup({"orphan_protection_time": 0})
    monitor_task(orphans_response.task)

def tearDown(self):
    """Clean created resources."""
    # Runs any delete tasks and waits for them to complete
    self.doCleanups()
    orphans_response = self.orphans_api.cleanup({"orphan_protection_time": 0})
    monitor_task(orphans_response.task)

def test_reclaim_immediate_content(self):
    """
    Test whether immediate repository content can be reclaimed
    and then re-populated back after sync.
    """
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)

    remote = self.remote_api.create(gen_file_remote())
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # sync the repository with immediate policy
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)

    # reclaim disk space
    reclaim_response = self.reclaim_api.reclaim({"repo_hrefs": [repo.pulp_href]})
    monitor_task(reclaim_response.task)

    # assert no artifacts left
    artifacts = self.artifacts_api.list().count
    self.assertEqual(artifacts, 0)

    # sync repo again
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)

    # assert re-sync populated missing artifacts
    artifacts = self.artifacts_api.list().count
    self.assertGreater(artifacts, 0)
    self.addCleanup(self.orphans_api.cleanup, {"orphan_protection_time": 0})

def test_reclaim_on_demand_content(self):
    """
    Test whether on_demand repository content can be reclaimed
    and then re-populated back after client request.
    """
    repo, distribution = self._repo_sync_distribute(policy="on_demand")

    # Streaming a unit to a client pulls its artifact down on demand.
    artifacts_before_download = self.artifacts_api.list().count
    content = get_content(repo.to_dict())[FILE_CONTENT_NAME][0]
    download_content_unit(self.cfg, distribution.to_dict(), content["relative_path"])

    artifacts = self.artifacts_api.list().count
    self.assertGreater(artifacts, artifacts_before_download)

    # reclaim disk space
    reclaim_response = self.reclaim_api.reclaim({"repo_hrefs": [repo.pulp_href]})
    monitor_task(reclaim_response.task)

    # A fresh client request should re-download the reclaimed artifact.
    artifacts_after_reclaim = self.artifacts_api.list().count
    content = get_content(repo.to_dict())[FILE_CONTENT_NAME]
    download_content_unit(self.cfg, distribution.to_dict(), content[0]["relative_path"])

    artifacts = self.artifacts_api.list().count
    self.assertGreater(artifacts, artifacts_after_reclaim)

def test_immediate_reclaim_becomes_on_demand(self):
    """Tests if immediate content becomes like on_demand content after reclaim."""
    repo, distribution = self._repo_sync_distribute()

    artifacts_before_reclaim = self.artifacts_api.list().count
    self.assertGreater(artifacts_before_reclaim, 0)
    content = get_content(repo.to_dict())[FILE_CONTENT_NAME][0]
    # Populate cache
    download_content_unit(self.cfg, distribution.to_dict(), content["relative_path"])

    reclaim_response = self.reclaim_api.reclaim({"repo_hrefs": [repo.pulp_href]})
    monitor_task(reclaim_response.task)

    artifacts_after_reclaim = self.artifacts_api.list().count
    self.assertLess(artifacts_after_reclaim, artifacts_before_reclaim)

    download_content_unit(self.cfg, distribution.to_dict(), content["relative_path"])
    artifacts_after_download = self.artifacts_api.list().count
    # Downloading a reclaimed content will increase the artifact count by 1
    self.assertEqual(artifacts_after_download, artifacts_after_reclaim + 1)
    # But only 1 extra artifact will be downloaded, so still less than after immediate sync
    self.assertLess(artifacts_after_download, artifacts_before_reclaim)

def test_specified_all_repos(self):
    """Tests that specifying all repos w/ '*' properly grabs all the repos."""
    repos = [self.repo_api.create(gen_repo()) for _ in range(10)]
    for repo in repos:
        self.addCleanup(self.repo_api.delete, repo.pulp_href)

    # NOTE: deliberate rebind — `repos` now holds the hrefs of *every* repo
    # on the system (the ten above plus any pre-existing ones).
    repos = [r.pulp_href for r in self.all_repo_api.list().results]

    reclaim_response = self.reclaim_api.reclaim({"repo_hrefs": ["*"]})
    task_status = monitor_task(reclaim_response.task)

    # The reclaim task must have locked exactly the full set of repos.
    repos_locked = [r.split(":")[-1] for r in task_status.reserved_resources_record]
    self.assertEqual(len(repos), len(repos_locked))
    self.assertEqual(set(repos), set(repos_locked))
# Marking test trylast to ensure other tests run even if this fails.
@pytest.mark.nightly
@pytest.mark.trylast
def test_remove_plugin(
    cli_client,
    delete_orphans_pre,
    file_fixture_gen_file_repo,
    file_repo_api_client,
    start_and_check_services,
    stop_and_check_services,
):
    """Round-trip `pulpcore-manager remove-plugin file` followed by re-migration.

    Services are stopped while the plugin's tables/migrations are removed,
    then restarted after the migrations are re-applied.
    """
    repo_name = "repo for plugin removal test"
    file_repo_pre_removal = file_repo_api_client.create(gen_repo(name=repo_name))

    assert stop_and_check_services() is True

    res = cli_client.run(["pulpcore-manager", "remove-plugin", "file"])
    assert "Successfully removed" in res.stdout
    # Count what was unapplied/removed so the re-migration can be checked against it.
    num_migrations = res.stdout.count("Unapplying file.")
    num_models = res.stdout.count("Removing model")

    # Without uninstalling the package just run migrations again to mimic the reinstallation
    # of a plugin at least from pulp's perspective
    res = cli_client.run(["pulpcore-manager", "migrate", "file"])
    assert res.stdout.count("Applying file.") == num_migrations
    # This assumes each model gets its own access policy plus FileRepositoryVersion
    assert res.stdout.count("created.") == num_models + 1

    assert start_and_check_services() is True

    # create a repo with the same name as before the removal
    file_repo_post_reinstall = file_fixture_gen_file_repo(name=repo_name)

    # Same name, but a brand-new object (different href) after the round-trip.
    assert file_repo_pre_removal.name == file_repo_post_reinstall.name
    assert file_repo_pre_removal.pulp_href != file_repo_post_reinstall.pulp_href
# Endpoint for whole-system artifact repair (verify and redownload).
REPAIR_PATH = urljoin(BASE_PATH, "repair/")


# Storage backends where artifacts live as plain files on disk, so these tests
# can simulate bit-rot/deletion by editing files directly.
SUPPORTED_STORAGE_FRAMEWORKS = [
    "django.core.files.storage.FileSystemStorage",
    "pulpcore.app.models.storage.FileSystem",
]


@classmethod
def setUpClass(cls):
    """Create class-wide variables."""
    cls.cfg = config.get_config()
    cls.api_client = api.Client(cls.cfg, api.smart_handler)
    cls.cli_client = cli.Client(cls.cfg)

    # Bit-rot simulation needs direct file access; skip on other storage backends.
    storage = utils.get_pulp_setting(cls.cli_client, "DEFAULT_FILE_STORAGE")
    if storage not in SUPPORTED_STORAGE_FRAMEWORKS:
        raise unittest.SkipTest(
            "Cannot simulate bit-rot on this storage platform ({}).".format(storage),
        )
+ """ + # STEP 1 + delete_orphans() + repo = self.api_client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(self.api_client.delete, repo["pulp_href"]) + + body = gen_file_remote() + remote = self.api_client.post(FILE_REMOTE_PATH, body) + self.addCleanup(self.api_client.delete, remote["pulp_href"]) + + sync(self.cfg, remote, repo) + repo = self.api_client.get(repo["pulp_href"]) + + # STEP 2 + media_root = utils.get_pulp_setting(self.cli_client, "MEDIA_ROOT") + content1, content2 = sample(get_content(repo)[FILE_CONTENT_NAME], 2) + # Muddify one artifact on disk. + artifact1_path = os.path.join(media_root, self.api_client.get(content1["artifact"])["file"]) + cmd1 = ("sed", "-i", "-e", r"$a bit rot", artifact1_path) + self.cli_client.run(cmd1, sudo=True) + # Delete another one from disk. + artifact2_path = os.path.join(media_root, self.api_client.get(content2["artifact"])["file"]) + cmd2 = ("rm", artifact2_path) + self.cli_client.run(cmd2, sudo=True) + + self.repo = repo + + def _verify_repair_results(self, result, missing=0, corrupted=0, repaired=0): + """Parse the repair task output and confirm it matches expectations.""" + progress_reports = {report["code"]: report for report in result["progress_reports"]} + + corrupted_units_report = progress_reports["repair.corrupted"] + self.assertEqual(corrupted_units_report["done"], corrupted, corrupted_units_report) + + missing_units_report = progress_reports["repair.missing"] + self.assertEqual(missing_units_report["done"], missing, missing_units_report) + + repaired_units_report = progress_reports["repair.repaired"] + self.assertEqual(repaired_units_report["done"], repaired, repaired_units_report) + + def test_repair_global_with_checksums(self): + """Test whether missing and corrupted files can be redownloaded. + + Do the following: + + 3. Perform Pulp repair, including checksum verification. + 4. Assert that the repair task reported two corrupted and two repaired units. + 5. Repeat the Pulp repair operation. + 6. 
Assert that the repair task reported no missing, corrupted or repaired units. + """ + # STEP 3 + result = self.api_client.post(REPAIR_PATH, {"verify_checksums": True}) + + # STEP 4 + self._verify_repair_results(result, missing=1, corrupted=1, repaired=2) + + # STEP 5 + result = self.api_client.post(REPAIR_PATH, {"verify_checksums": True}) + + # STEP 6 + self._verify_repair_results(result) + + def test_repair_global_without_checksums(self): + """Test whether missing files can be redownloaded. + + Do the following: + + 3. Perform Pulp repair, not including checksum verification. + 4. Assert that the repair task reported one missing and one repaired unit. + 5. Repeat the Pulp repair operation. + 6. Assert that the repair task reported no missing, corrupted or repaired units. + 7. Repeat the Pulp repair operation, this time including checksum verification. + 8. Assert that the repair task reported one corrupted and one repaired unit. + """ + # STEP 3 + result = self.api_client.post(REPAIR_PATH, {"verify_checksums": False}) + + # STEP 4 + self._verify_repair_results(result, missing=1, repaired=1) + + # STEP 5 + result = self.api_client.post(REPAIR_PATH, {"verify_checksums": False}) + + # STEP 6 + self._verify_repair_results(result) + + # STEP 7 + result = self.api_client.post(REPAIR_PATH, {"verify_checksums": True}) + + # STEP 8 + self._verify_repair_results(result, corrupted=1, repaired=1) + + def test_repair_repository_version_with_checksums(self): + """Test whether corrupted files can be redownloaded. + + Do the following: + + 3. Repair the RepositoryVersion. + 4. Assert that the repair task reported two corrupted and two repaired units. + 5. Repeat the RepositoryVersion repair operation. + 6. Assert that the repair task reported no missing, corrupted or repaired units. 
+ """ + # STEP 3 + latest_version = get_versions(self.repo)[-1]["pulp_href"] + result = self.api_client.post(latest_version + "repair/", {"verify_checksums": True}) + + # STEP 4 + self._verify_repair_results(result, missing=1, corrupted=1, repaired=2) + + # STEP 5 + result = self.api_client.post(latest_version + "repair/", {"verify_checksums": True}) + + # STEP 6 + self._verify_repair_results(result) + + def test_repair_repository_version_without_checksums(self): + """Test whether missing files can be redownloaded. + + Do the following: + + 3. Repair the RepositoryVersion, not including checksum verification. + 4. Assert that the repair task reported one missing and one repaired unit. + 5. Repeat the RepositoryVersion repair operation. + 6. Assert that the repair task reported no missing, corrupted or repaired units. + 7. Repeat the RepositoryVersion repair operation, this time including checksum verification + 8. Assert that the repair task reported one corrupted and one repaired unit. 
+ """ + # STEP 3 + latest_version = get_versions(self.repo)[-1]["pulp_href"] + result = self.api_client.post(latest_version + "repair/", {"verify_checksums": False}) + + # STEP 4 + self._verify_repair_results(result, missing=1, repaired=1) + + # STEP 5 + result = self.api_client.post(REPAIR_PATH, {"verify_checksums": False}) + + # STEP 6 + self._verify_repair_results(result) + + # STEP 7 + result = self.api_client.post(latest_version + "repair/", {"verify_checksums": True}) + + # STEP 8 + self._verify_repair_results(result, corrupted=1, repaired=1) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_repo_versions.py b/pulp_file/tests/functional/api/from_pulpcore/test_repo_versions.py new file mode 100644 index 000000000..67f815fc8 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_repo_versions.py @@ -0,0 +1,1379 @@ +"""Tests related to repository versions.""" +import unittest +import pytest +from random import choice, randint, sample +from time import sleep +from urllib.parse import urlsplit +from tempfile import NamedTemporaryFile +from hashlib import sha256 + +from pulp_smash import api, config, utils +from pulp_smash.exceptions import TaskReportError +from pulp_smash.pulp3.bindings import delete_orphans, monitor_task +from pulp_smash.pulp3.constants import ARTIFACTS_PATH +from pulp_smash.pulp3.utils import ( + download_content_unit, + delete_version, + gen_repo, + gen_distribution, + get_added_content, + get_added_content_summary, + get_artifact_paths, + get_content, + get_content_summary, + get_removed_content, + get_removed_content_summary, + get_versions, + modify_repo, + sync, +) +from requests.exceptions import HTTPError + +from pulpcore.client.pulpcore import ApiClient as CoreApiClient +from pulpcore.client.pulpcore import RepositoryVersionsApi +from pulpcore.client.pulp_file import ( + ContentFilesApi, + DistributionsFileApi, + PublicationsFileApi, + RemotesFileApi, + RepositoriesFileApi, + RepositoriesFileVersionsApi, + 
RepositorySyncURL, +) +from pulpcore.client.pulp_file.exceptions import ApiException + +from pulp_file.tests.functional.utils import ( + create_distribution, + create_file_publication, + gen_file_client, + gen_file_remote, + populate_pulp, +) +from .constants import ( + FILE2_FIXTURE_MANIFEST_URL, + FILE_CONTENT_NAME, + FILE_CONTENT_PATH, + FILE_DISTRIBUTION_PATH, + FILE_FIXTURE_COUNT, + FILE_FIXTURE_MANIFEST_URL, + FILE_FIXTURE_SUMMARY, + FILE_LARGE_FIXTURE_MANIFEST_URL, + FILE_MANY_FIXTURE_MANIFEST_URL, + FILE_REMOTE_PATH, + FILE_REPO_PATH, + FILE_URL, + FILE2_URL, +) + + +def remove_created_key(dic): + """Given a dict remove the key `created`.""" + return {k: v for k, v in dic.items() if k != "created"} + + +class AddRemoveContentTestCase(unittest.TestCase): + """Add and remove content to a repository. Verify side-effects. + + A new repository version is automatically created each time content is + added to or removed from a repository. Furthermore, it's possible to + inspect any repository version and discover which content is present, which + content was removed, and which content was added. This test case explores + these features. + """ + + @classmethod + def setUpClass(cls): + cls.cfg = config.get_config() + cls.client = api.Client(cls.cfg, api.page_handler) + + def setUp(self): + self.remote = {} + self.repo = {} + self.content = {} + + def tearDown(self): + if self.remote: + self.client.delete(self.remote["pulp_href"]) + if self.repo: + self.client.delete(self.repo["pulp_href"]) + + def test_workflow(self): + self._create_repository() + self._sync_content() + self._remove_content() + self._add_content() + + def _create_repository(self): + """Create a repository. + + Assert that: + + * The ``versions_href`` API call is correct. + * The ``latest_version_href`` API call is correct. 
+ """ + self.repo.update(self.client.post(FILE_REPO_PATH, gen_repo())) + + repo_versions = get_versions(self.repo) + self.assertEqual(len(repo_versions), 1, repo_versions) + + self.assertEqual(self.repo["latest_version_href"], f"{self.repo['pulp_href']}versions/0/") + + def _sync_content(self): + """Sync content into the repository. + + Assert that: + + * The ``versions_href`` API call is correct. + * The ``latest_version_href`` API call is correct. + * The ``content_hrefs`` attribute is correct. + * The ``content_added_hrefs`` attribute is correct. + * The ``content_removed_hrefs`` attribute is correct. + * The ``content_summary`` attribute is correct. + * The ``content_added_summary`` attribute is correct. + * The ``content_removed_summary`` attribute is correct. + """ + body = gen_file_remote() + body.update({"headers": [{"Connection": "keep-alive"}]}) + self.remote.update(self.client.post(FILE_REMOTE_PATH, body)) + sync(self.cfg, self.remote, self.repo) + repo = self.client.get(self.repo["pulp_href"]) + + repo_versions = get_versions(repo) + self.assertEqual(len(repo_versions), 2, repo_versions) + + self.assertIsNotNone(repo["latest_version_href"]) + + content_hrefs = get_content(repo)[FILE_CONTENT_NAME] + self.assertEqual(len(content_hrefs), FILE_FIXTURE_COUNT, content_hrefs) + + content = get_content(repo)[FILE_CONTENT_NAME] + self.assertEqual(len(content), FILE_FIXTURE_COUNT) + + content_added = get_added_content(repo)[FILE_CONTENT_NAME] + self.assertEqual(len(content_added), FILE_FIXTURE_COUNT) + + content_removed = get_removed_content(repo)[FILE_CONTENT_NAME] + self.assertEqual(len(content_removed), 0) + + content_summary = get_content_summary(repo) + self.assertDictEqual(content_summary, FILE_FIXTURE_SUMMARY) + + content_added_summary = get_added_content_summary(repo) + self.assertDictEqual(content_added_summary, FILE_FIXTURE_SUMMARY) + + content_removed_summary = get_removed_content_summary(repo) + self.assertDictEqual(content_removed_summary, {}) + + 
def _remove_content(self): + """Remove content from the repository. + + Make roughly the same assertions as :meth:`test_02_sync_content`. + """ + repo = self.client.get(self.repo["pulp_href"]) + self.content.update(choice(get_content(repo)[FILE_CONTENT_NAME])) + + modify_repo(self.cfg, self.repo, remove_units=[self.content]) + repo = self.client.get(self.repo["pulp_href"]) + + repo_versions = get_versions(repo) + self.assertEqual(len(repo_versions), 3, repo_versions) + + self.assertIsNotNone(repo["latest_version_href"]) + + content_hrefs = get_content(repo)[FILE_CONTENT_NAME] + self.assertEqual(len(content_hrefs), FILE_FIXTURE_COUNT - 1, content_hrefs) + + content = get_content(repo)[FILE_CONTENT_NAME] + self.assertEqual(len(content), FILE_FIXTURE_COUNT - 1) + + added_content = get_added_content(repo)[FILE_CONTENT_NAME] + self.assertListEqual(added_content, [], added_content) + + removed_content = get_removed_content(repo)[FILE_CONTENT_NAME] + self.assertListEqual(removed_content, [self.content], removed_content) + + content_summary = get_content_summary(repo) + self.assertDictEqual(content_summary, {FILE_CONTENT_NAME: FILE_FIXTURE_COUNT - 1}) + + content_added_summary = get_added_content_summary(repo) + self.assertDictEqual(content_added_summary, {}) + + content_removed_summary = get_removed_content_summary(repo) + self.assertDictEqual(content_removed_summary, {FILE_CONTENT_NAME: 1}) + + def _add_content(self): + """Add content to the repository. + + Make roughly the same assertions as :meth:`test_02_sync_content`. 
+ """ + repo = self.client.get(self.repo["pulp_href"]) + modify_repo(self.cfg, self.repo, add_units=[self.content]) + repo = self.client.get(self.repo["pulp_href"]) + + repo_versions = get_versions(repo) + self.assertEqual(len(repo_versions), 4, repo_versions) + + self.assertIsNotNone(repo["latest_version_href"]) + + content_hrefs = get_content(repo)[FILE_CONTENT_NAME] + self.assertEqual(len(content_hrefs), FILE_FIXTURE_COUNT, content_hrefs) + + content = get_content(repo)[FILE_CONTENT_NAME] + self.assertEqual(len(content), FILE_FIXTURE_COUNT) + + added_content = get_added_content(repo)[FILE_CONTENT_NAME] + self.assertListEqual(added_content, [self.content], added_content) + + removed_content = get_removed_content(repo)[FILE_CONTENT_NAME] + self.assertListEqual(removed_content, [], removed_content) + + content_summary = get_content_summary(repo) + self.assertDictEqual(content_summary, FILE_FIXTURE_SUMMARY) + + content_added_summary = get_added_content_summary(repo) + self.assertDictEqual(content_added_summary, {FILE_CONTENT_NAME: 1}) + + content_removed_summary = get_removed_content_summary(repo) + self.assertDictEqual(content_removed_summary, {}) + + def get_content_summary(self, repo): + """Get the ``content_summary`` for the given repository.""" + repo_versions = get_versions(repo) + content_summaries = [ + repo_version["content_summary"] + for repo_version in repo_versions + if repo_version["pulp_href"] == repo["latest_version_href"] + ] + self.assertEqual(len(content_summaries), 1, content_summaries) + return content_summaries[0] + + +class SyncChangeRepoVersionTestCase(unittest.TestCase): + """Verify whether sync of repository updates repository version.""" + + def test_all(self): + """Verify whether the sync of a repository updates its version. + + This test explores the design choice stated in the `Pulp #3308`_ that a + new repository version is created even if the sync does not add or + remove any content units. 
Even without any changes to the remote if a + new sync occurs, a new repository version is created. + + .. _Pulp #3308: https://pulp.plan.io/issues/3308 + + Do the following: + + 1. Create a repository, and a remote. + 2. Sync the repository. + 3. Remove all content - one by one. + 3. Verify that the repository version is equal to the number of operations. + """ + cfg = config.get_config() + client = api.Client(cfg, api.json_handler) + + repo = client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(client.delete, repo["pulp_href"]) + + body = gen_file_remote() + remote = client.post(FILE_REMOTE_PATH, body) + self.addCleanup(client.delete, remote["pulp_href"]) + + sync(cfg, remote, repo) + repo = client.get(repo["pulp_href"]) + for file_content in get_content(repo)[FILE_CONTENT_NAME]: + modify_repo(cfg, repo, remove_units=[file_content]) + repo = client.get(repo["pulp_href"]) + path = urlsplit(repo["latest_version_href"]).path + latest_repo_version = int(path.split("/")[-2]) + self.assertEqual(latest_repo_version, 4) + + +class AddRemoveRepoVersionTestCase(unittest.TestCase): + """Create and delete repository versions. + + This test targets the following issues: + + * `Pulp #3219 `_ + * `Pulp Smash #871 `_ + """ + + # `cls.content[i]` is a dict. + # pylint:disable=unsubscriptable-object + + @classmethod + def setUpClass(cls): + """Add content to Pulp.""" + cls.cfg = config.get_config() + cls.client = api.Client(cls.cfg, api.json_handler) + delete_orphans() + populate_pulp(cls.cfg, url=FILE_LARGE_FIXTURE_MANIFEST_URL) + # We need at least three content units. Choosing a relatively low + # number is useful, to limit how many repo versions are created, and + # thus how long the test takes. 
+ cls.content = sample(cls.client.get(FILE_CONTENT_PATH)["results"], 4) + + def setUp(self): + """Create a repository and give it nine new versions.""" + self.repo = self.client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(self.client.delete, self.repo["pulp_href"]) + + # Don't upload the last content unit. The test case might upload it to + # create a new repo version within the test. + for content in self.content[:-1]: + self.client.post( + self.repo["pulp_href"] + "modify/", {"add_content_units": [content["pulp_href"]]} + ) + self.repo = self.client.get(self.repo["pulp_href"]) + self.repo_version_hrefs = tuple(version["pulp_href"] for version in get_versions(self.repo)) + + def test_delete_first_version(self): + """Delete the first repository version (version 0).""" + delete_version(self.repo, self.repo_version_hrefs[0]) + + def test_delete_last_version(self): + """Delete the last repository version. + + Create a new repository version from the second-to-last repository + version. Verify that the content unit from the old last repository + version is not in the new last repository version. + """ + # Delete the last repo version. + delete_version(self.repo, self.repo_version_hrefs[-1]) + with self.assertRaises(HTTPError): + get_content(self.repo, self.repo_version_hrefs[-1]) + + # Make new repo version from new last repo version. 
+ self.client.post( + self.repo["pulp_href"] + "modify/", + {"add_content_units": [self.content[-1]["pulp_href"]]}, + ) + self.repo = self.client.get(self.repo["pulp_href"]) + artifact_paths = get_artifact_paths(self.repo) + + self.assertNotIn(self.content[-2]["artifact"], artifact_paths) + self.assertIn(self.content[-1]["artifact"], artifact_paths) + + def test_delete_middle_version(self): + """Delete a middle version.""" + index = randint(1, len(self.repo_version_hrefs) - 3) + delete_version(self.repo, self.repo_version_hrefs[index]) + + with self.assertRaises(HTTPError): + get_content(self.repo, self.repo_version_hrefs[index]) + + # Check added count is updated properly + added = get_added_content_summary(self.repo, self.repo_version_hrefs[index + 1]) + self.assertEqual(added["file.file"], 2) + + for repo_version_href in self.repo_version_hrefs[index + 1 :]: + artifact_paths = get_artifact_paths(self.repo, repo_version_href) + self.assertIn(self.content[index]["artifact"], artifact_paths) + + def test_delete_all_versions(self): + """Attempt to delete all versions.""" + for repo_version_href in self.repo_version_hrefs[:-1]: + delete_version(self.repo, repo_version_href) + + with self.assertRaises(TaskReportError) as ctx: + delete_version(self.repo, self.repo_version_hrefs[-1]) + + self.assertIn( + "Cannot delete repository version.", ctx.exception.task["error"]["description"] + ) + + def test_delete_publication(self): + """Delete a publication. + + Delete a repository version, and verify the associated publication is + also deleted. + """ + publication = create_file_publication(self.cfg, self.repo) + delete_version(self.repo) + + with self.assertRaises(HTTPError): + self.client.get(publication["pulp_href"]) + + +@pytest.mark.parallel +def test_squash_repo_version( + file_repo_api_client, file_repo_ver_api_client, file_content_api_client, file_repo +): + """Test that the deletion of a repository version properly squashes the content. 
+ + - Setup versions like: + Version 0: + add: ABCDE + Version 1: ABCDE + delete: BCDE; add: FGHI + Version 2: AFGHI -- to be deleted + delete: GI; add: CD + Version 3: ACDFH -- to be squashed into + delete: DH; add: EI + Version 4: ACEFI + - Delete version 2. + - Check the content of all remaining versions. + """ + content_units = {} + for name in ["A", "B", "C", "D", "E", "F", "G", "H", "I"]: + try: + content_units[name] = file_content_api_client.list( + relative_path=name, sha256=sha256(name.encode()).hexdigest() + ).results[0] + except IndexError: + with NamedTemporaryFile() as tf: + tf.write(name.encode()) + tf.flush() + response = file_content_api_client.create(relative_path=name, file=tf.name) + result = monitor_task(response.task) + content_units[name] = file_content_api_client.read(result.created_resources[0]) + response1 = file_repo_api_client.modify( + file_repo.pulp_href, + { + "add_content_units": [ + content.pulp_href + for key, content in content_units.items() + if key in ["A", "B", "C", "D", "E"] + ] + }, + ) + + response2 = file_repo_api_client.modify( + file_repo.pulp_href, + { + "remove_content_units": [ + content.pulp_href + for key, content in content_units.items() + if key in ["B", "C", "D", "E"] + ], + "add_content_units": [ + content.pulp_href + for key, content in content_units.items() + if key in ["F", "G", "H", "I"] + ], + }, + ) + + response3 = file_repo_api_client.modify( + file_repo.pulp_href, + { + "remove_content_units": [ + content.pulp_href for key, content in content_units.items() if key in ["G", "I"] + ], + "add_content_units": [ + content.pulp_href for key, content in content_units.items() if key in ["C", "D"] + ], + }, + ) + + response4 = file_repo_api_client.modify( + file_repo.pulp_href, + { + "remove_content_units": [ + content.pulp_href for key, content in content_units.items() if key in ["D", "H"] + ], + "add_content_units": [ + content.pulp_href for key, content in content_units.items() if key in ["E", "I"] + ], + }, + ) 
+ version1 = file_repo_ver_api_client.read(monitor_task(response1.task).created_resources[0]) + version2 = file_repo_ver_api_client.read(monitor_task(response2.task).created_resources[0]) + version3 = file_repo_ver_api_client.read(monitor_task(response3.task).created_resources[0]) + version4 = file_repo_ver_api_client.read(monitor_task(response4.task).created_resources[0]) + + # Check version state before deletion + assert version1.content_summary.added["file.file"]["count"] == 5 + assert "file.file" not in version1.content_summary.removed + assert version2.content_summary.added["file.file"]["count"] == 4 + assert version2.content_summary.removed["file.file"]["count"] == 4 + assert version3.content_summary.added["file.file"]["count"] == 2 + assert version3.content_summary.removed["file.file"]["count"] == 2 + assert version4.content_summary.added["file.file"]["count"] == 2 + assert version4.content_summary.removed["file.file"]["count"] == 2 + + content1 = file_content_api_client.list(repository_version=version1.pulp_href) + content2 = file_content_api_client.list(repository_version=version2.pulp_href) + content3 = file_content_api_client.list(repository_version=version3.pulp_href) + content4 = file_content_api_client.list(repository_version=version4.pulp_href) + assert set((content.relative_path for content in content1.results)) == {"A", "B", "C", "D", "E"} + assert set((content.relative_path for content in content2.results)) == {"A", "F", "G", "H", "I"} + assert set((content.relative_path for content in content3.results)) == {"A", "C", "D", "F", "H"} + assert set((content.relative_path for content in content4.results)) == {"A", "C", "E", "F", "I"} + + monitor_task(file_repo_ver_api_client.delete(version2.pulp_href).task) + + # Check version state after deletion (Version 2 is gone...) 
+ version1 = file_repo_ver_api_client.read(version1.pulp_href) + version3 = file_repo_ver_api_client.read(version3.pulp_href) + version4 = file_repo_ver_api_client.read(version4.pulp_href) + + assert version1.content_summary.added["file.file"]["count"] == 5 + assert "file.file" not in version1.content_summary.removed + assert version3.content_summary.added["file.file"]["count"] == 2 + assert version3.content_summary.removed["file.file"]["count"] == 2 + assert version4.content_summary.added["file.file"]["count"] == 2 + assert version4.content_summary.removed["file.file"]["count"] == 2 + + content1 = file_content_api_client.list(repository_version=version1.pulp_href) + content3 = file_content_api_client.list(repository_version=version3.pulp_href) + content4 = file_content_api_client.list(repository_version=version4.pulp_href) + assert set((content.relative_path for content in content1.results)) == {"A", "B", "C", "D", "E"} + assert set((content.relative_path for content in content3.results)) == {"A", "C", "D", "F", "H"} + assert set((content.relative_path for content in content4.results)) == {"A", "C", "E", "F", "I"} + + +class ContentImmutableRepoVersionTestCase(unittest.TestCase): + """Test whether the content present in a repo version is immutable. + + This test targets the following issue: + + * `Pulp Smash #953 `_ + """ + + def test_all(self): + """Test whether the content present in a repo version is immutable. + + Do the following: + + 1. Create a repository that has at least one repository version. + 2. Attempt to update the content of a repository version. + 3. Assert that an HTTP exception is raised. + 4. Assert that the repository version was not updated. 
+ """ + cfg = config.get_config() + client = api.Client(cfg, api.json_handler) + + repo = client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(client.delete, repo["pulp_href"]) + + body = gen_file_remote() + remote = client.post(FILE_REMOTE_PATH, body) + self.addCleanup(client.delete, remote["pulp_href"]) + + sync(cfg, remote, repo) + + latest_version_href = client.get(repo["pulp_href"])["latest_version_href"] + with self.assertRaises(HTTPError): + client.post(latest_version_href) + repo = client.get(repo["pulp_href"]) + self.assertEqual(latest_version_href, repo["latest_version_href"]) + + +class FilterRepoVersionTestCase(unittest.TestCase): + """Test whether repository versions can be filtered. + + These tests target the following issues: + + * `Pulp #3238 `_ + * `Pulp #3536 `_ + * `Pulp #3557 `_ + * `Pulp #3558 `_ + * `Pulp Smash #880 `_ + """ + + @classmethod + def setUpClass(cls): + """Create class-wide variables. + + Add content to Pulp. + """ + cls.cfg = config.get_config() + cls.client = api.Client(cls.cfg, api.json_handler) + + populate_pulp(cls.cfg) + cls.contents = cls.client.get(FILE_CONTENT_PATH)["results"] + + def setUp(self): + """Create a repository and give it new versions.""" + self.repo = self.client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(self.client.delete, self.repo["pulp_href"]) + + for content in self.contents[:10]: # slice is arbitrary upper bound + modify_repo(self.cfg, self.repo, add_units=[content]) + sleep(1) + self.repo = self.client.get(self.repo["pulp_href"]) + + def test_filter_invalid_content(self): + """Filter repository version by invalid content.""" + with self.assertRaises(HTTPError): + get_versions(self.repo, {"content": utils.uuid4()}) + + def test_filter_valid_content(self): + """Filter repository versions by valid content.""" + content = choice(self.contents) + repo_versions = get_versions(self.repo, {"content": content["pulp_href"]}) + for repo_version in repo_versions: + self.assertIn( + 
self.client.get(content["pulp_href"]), + get_content(self.repo, repo_version["pulp_href"])[FILE_CONTENT_NAME], + ) + + def test_filter_invalid_date(self): + """Filter repository version by invalid date.""" + criteria = utils.uuid4() + for params in ( + {"pulp_created": criteria}, + {"pulp_created__gt": criteria, "pulp_created__lt": criteria}, + {"pulp_created__gte": criteria, "pulp_created__lte": criteria}, + {"pulp_created__range": ",".join((criteria, criteria))}, + ): + with self.subTest(params=params): + with self.assertRaises(HTTPError): + get_versions(self.repo, params) + + def test_filter_valid_date(self): + """Filter repository version by a valid date.""" + dates = self.get_repo_versions_attr("pulp_created") + for params, num_results in ( + ({"pulp_created": dates[0]}, 1), + ({"pulp_created__gt": dates[0], "pulp_created__lt": dates[-1]}, len(dates) - 2), + ({"pulp_created__gte": dates[0], "pulp_created__lte": dates[-1]}, len(dates)), + ({"pulp_created__range": ",".join((dates[0], dates[1]))}, 2), + ): + with self.subTest(params=params): + results = get_versions(self.repo, params) + self.assertEqual(len(results), num_results, results) + + def test_filter_nonexistent_version(self): + """Filter repository version by a nonexistent version number.""" + criteria = -1 + for params in ( + {"number": criteria}, + {"number__gt": criteria, "number__lt": criteria}, + {"number__gte": criteria, "number__lte": criteria}, + {"number__range": ",".join((str(criteria), str(criteria)))}, + ): + with self.subTest(params=params): + versions = get_versions(self.repo, params) + self.assertEqual(len(versions), 0, versions) + + def test_filter_invalid_version(self): + """Filter repository version by an invalid version number.""" + criteria = utils.uuid4() + for params in ( + {"number": criteria}, + {"number__gt": criteria, "number__lt": criteria}, + {"number__gte": criteria, "number__lte": criteria}, + {"number__range": ",".join((criteria, criteria))}, + ): + with 
self.subTest(params=params): + with self.assertRaises(HTTPError): + get_versions(self.repo, params) + + def test_filter_valid_version(self): + """Filter repository version by a valid version number.""" + numbers = self.get_repo_versions_attr("number") + for params, num_results in ( + ({"number": numbers[0]}, 1), + ({"number__gt": numbers[0], "number__lt": numbers[-1]}, len(numbers) - 2), + ({"number__gte": numbers[0], "number__lte": numbers[-1]}, len(numbers)), + ({"number__range": "{},{}".format(numbers[0], numbers[1])}, 2), + ): + with self.subTest(params=params): + results = get_versions(self.repo, params) + self.assertEqual(len(results), num_results, results) + + def test_deleted_version_filter(self): + """Delete a repository version and filter by its number.""" + numbers = self.get_repo_versions_attr("number") + delete_version(self.repo) + versions = get_versions(self.repo, {"number": numbers[-1]}) + self.assertEqual(len(versions), 0, versions) + + def get_repo_versions_attr(self, attr): + """Get an ``attr`` about each version of ``self.repo``. + + Return as sorted list. + """ + attributes = [version[attr] for version in get_versions(self.repo)] + attributes.sort() + return attributes + + +class CreatedResourcesTaskTestCase(unittest.TestCase): + """Verify whether task report shows that a repository version was created. + + This test targets the following issue: + + `Pulp Smash #876 `_. 
+ """ + + def test_all(self): + """Verify whether task report shows repository version was created.""" + cfg = config.get_config() + client = api.Client(cfg, api.json_handler) + + repo = client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(client.delete, repo["pulp_href"]) + + body = gen_file_remote() + remote = client.post(FILE_REMOTE_PATH, body) + self.addCleanup(client.delete, remote["pulp_href"]) + + call_report = sync(cfg, remote, repo) + for key in ("repositories", "versions"): + self.assertIn(key, call_report["pulp_href"], call_report) + + +class CreateRepoBaseVersionTestCase(unittest.TestCase): + """Test whether one can create a repository version from any version. + + This test targets the following issues: + + `Pulp #3360 `_ + `Pulp #4035 `_ + """ + + @classmethod + def setUpClass(cls): + """Create class-wide variables.""" + cls.cfg = config.get_config() + delete_orphans() + populate_pulp(cls.cfg, url=FILE_LARGE_FIXTURE_MANIFEST_URL) + cls.client = api.Client(cls.cfg, api.page_handler) + cls.content = cls.client.get(FILE_CONTENT_PATH) + + def test_same_repository(self): + """Test ``base_version`` for the same repository. + + Do the following: + + 1. Create a repository. + 2. Sync the repository (this creates repository version 1). + 3. Add a new content unit a new repository version (this create + repository version 2). + 4. Create a new repository version using version 1 as ``base_version`` + (this creates version 3). + 5. Check that version 1 and version 3 have the same content. 
+ """ + # create repo version 1 + repo = self.create_sync_repo() + version_content = [] + version_content.append( + sorted( + [remove_created_key(item) for item in get_content(repo)[FILE_CONTENT_NAME]], + key=lambda item: item["pulp_href"], + ) + ) + self.assertIsNone(get_versions(repo)[1]["base_version"]) + + content = self.content.pop() + + # create repo version 2 + modify_repo(self.cfg, repo, add_units=[content]) + repo = self.client.get(repo["pulp_href"]) + + # create repo version 3 from version 1 + base_version = get_versions(repo)[1]["pulp_href"] + modify_repo(self.cfg, repo, base_version=base_version) + repo = self.client.get(repo["pulp_href"]) + + # assert that base_version of the version 3 points to version 1 + self.assertEqual(get_versions(repo)[3]["base_version"], base_version) + + # assert that content on version 1 is equal to content on version 3 + version_content.append( + sorted( + [remove_created_key(item) for item in get_content(repo)[FILE_CONTENT_NAME]], + key=lambda item: item["pulp_href"], + ) + ) + self.assertEqual(version_content[0], version_content[1], version_content) + + def test_different_repository(self): + """Test ``base_version`` for different repositories. + + Do the following: + + 1. Create a new repository A and sync it. + 2. Create a new repository B and a new version for this repository + specify repository A version 1 as the ``base_version``. + 3. Check that repository A version 1 and repository B version 1 have + the same content. 
+ """ + # create repo A + repo = self.create_sync_repo() + version_content = [] + version_content.append( + sorted( + [remove_created_key(item) for item in get_content(repo)[FILE_CONTENT_NAME]], + key=lambda item: item["pulp_href"], + ) + ) + self.assertIsNone(get_versions(repo)[1]["base_version"]) + + # get repo A version 1 to be used as base_version + base_version = get_versions(repo)[1]["pulp_href"] + + # create repo B + repo = self.client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(self.client.delete, repo["pulp_href"]) + + # create a version for repo B using repo A version 1 as base_version + modify_repo(self.cfg, repo, base_version=base_version) + repo = self.client.get(repo["pulp_href"]) + + # assert that base_version of repo B points to version 1 of repo A + self.assertEqual(get_versions(repo)[1]["base_version"], base_version) + + # assert that content on version 1 of repo A is equal to content on + # version 1 repo B + version_content.append( + sorted( + [remove_created_key(item) for item in get_content(repo)[FILE_CONTENT_NAME]], + key=lambda item: item["pulp_href"], + ) + ) + + self.assertEqual(version_content[0], version_content[1], version_content) + + def test_base_version_other_parameters(self): + """Test ``base_version`` can be used together with other parameters. + + ``add_content_units`` and ``remove_content_units``. 
+ """ + # create repo version 1 + self.skipTest("Temporarily skipping while we figure out a better testing strategy.") + repo = self.create_sync_repo() + version_1_content = [ + remove_created_key(item) for item in get_content(repo)[FILE_CONTENT_NAME] + ] + self.assertIsNone(get_versions(repo)[1]["base_version"]) + + # create repo version 2 from version 1 + base_version = get_versions(repo)[1]["pulp_href"] + added_content = remove_created_key(self.content.pop()) + removed_content = choice(version_1_content) + modify_repo( + self.cfg, + repo, + base_version=base_version, + add_units=[added_content], + remove_units=[removed_content], + ) + repo = self.client.get(repo["pulp_href"]) + version_2_content = [ + remove_created_key(item) for item in get_content(repo)[FILE_CONTENT_NAME] + ] + + # assert that base_version of the version 2 points to version 1 + self.assertEqual(get_versions(repo)[2]["base_version"], base_version) + + # assert that the removed content is not present on repo version 2 + self.assertNotIn(removed_content, version_2_content) + + # assert that the added content is present on repo version 2 + self.assertIn(added_content, version_2_content) + + # assert that the same amount of units are present in both versions + self.assertEqual(len(version_1_content), len(version_2_content)) + + def test_base_version_exception(self): + """Exception is raised when non-existent ``base_version`` is used. + + Do the following: + + 1. Create a repository B and an attempt to specify a non-existent + ``base_version``. + 3. Assert that an HTTP exception is raised. 
+ """ + repo = self.client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(self.client.delete, repo["pulp_href"]) + + with self.assertRaises(HTTPError): + modify_repo(self.cfg, repo, base_version=utils.uuid4()) + + def create_sync_repo(self): + """Create, and sync a repo.""" + repo = self.client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(self.client.delete, repo["pulp_href"]) + + body = gen_file_remote(url=FILE_FIXTURE_MANIFEST_URL) + remote = self.client.post(FILE_REMOTE_PATH, body) + self.addCleanup(self.client.delete, remote["pulp_href"]) + + sync(self.cfg, remote, repo) + return self.client.get(repo["pulp_href"]) + + +class UpdateRepoVersionTestCase(unittest.TestCase): + """Repository version can not be updated using PATCH or PUT. + + Assert that an HTTP exception is raised. + + This test targets the following issue: + + * `Pulp #4667 `_ + """ + + @classmethod + def setUpClass(cls): + """Create class-wide variables.""" + cls.cfg = config.get_config() + cls.client = api.Client(cls.cfg) + + def test_http_error(self): + """Test partial update repository version.""" + remote = self.client.post(FILE_REMOTE_PATH, gen_file_remote()) + self.addCleanup(self.client.delete, remote["pulp_href"]) + + repo = self.client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(self.client.delete, repo["pulp_href"]) + + # create repo version + sync(self.cfg, remote, repo) + repo = self.client.get(repo["pulp_href"]) + + self.assert_patch(repo) + self.assert_put(repo) + + def assert_patch(self, repo): + """Assert PATCH method raises an HTTP exception.""" + previous_repo_name = repo["name"] + with self.assertRaises(HTTPError): + self.client.patch(repo["latest_version_href"], {"name": utils.uuid4()}) + repo = self.client.get(repo["pulp_href"]) + self.assertEqual(previous_repo_name, repo["name"], repo) + + def assert_put(self, repo): + """Assert PUT method raises an HTTP exception.""" + previous_repo_name = repo["name"] + with self.assertRaises(HTTPError): + repo["name"] = 
utils.uuid4() + self.client.put(repo["latest_version_href"], repo) + repo = self.client.get(repo["pulp_href"]) + self.assertEqual(previous_repo_name, repo["name"], repo) + + +class FilterArtifactsTestCase(unittest.TestCase): + """Filter artifacts by repository version. + + This test targets the following issue: + + * `Pulp #4811 `_ + """ + + @classmethod + def setUpClass(cls): + """Create class-wide variables. + + Populate Pulp with artifacts to show how the filter is related to + repository version. + """ + cls.cfg = config.get_config() + populate_pulp(cls.cfg, url=FILE_MANY_FIXTURE_MANIFEST_URL) + cls.client = api.Client(cls.cfg) + + def test_filter_last_repository_version(self): + """Filter by last repository version. + + For a repository with more than one version. + """ + repo = self.client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(self.client.delete, repo["pulp_href"]) + + for url in [FILE2_FIXTURE_MANIFEST_URL, FILE_FIXTURE_MANIFEST_URL]: + remote = self.client.post(FILE_REMOTE_PATH, gen_file_remote(url=url)) + self.addCleanup(self.client.delete, remote["pulp_href"]) + sync(self.cfg, remote, repo) + repo = self.client.get(repo["pulp_href"]) + + artifacts = self.client.get( + ARTIFACTS_PATH, params={"repository_version": repo["latest_version_href"]} + ) + # Even though every sync adds 3 content units to the repository the fixture data contains + # the same relative urls so the second sync replaces the first 3, leaving a total of 3 each + # time + self.assertEqual(len(artifacts), FILE_FIXTURE_COUNT, artifacts) + + def test_filter_invalid_repo_version(self): + """Filter by invalid repository version.""" + repo = self.client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(self.client.delete, repo["pulp_href"]) + with self.assertRaises(HTTPError) as ctx: + self.client.using_handler(api.json_handler).get( + ARTIFACTS_PATH, params={"repository_version": repo["pulp_href"]} + ) + for key in ("uri", "repositoryversion", "not", "found"): + 
self.assertIn(key, ctx.exception.response.json()[0].lower(), ctx.exception.response) + + def test_filter_valid_repo_version(self): + """Filter by valid repository version.""" + remote = self.client.post(FILE_REMOTE_PATH, gen_file_remote()) + self.addCleanup(self.client.delete, remote["pulp_href"]) + repo = self.client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(self.client.delete, repo["pulp_href"]) + sync(self.cfg, remote, repo) + repo = self.client.get(repo["pulp_href"]) + artifacts = self.client.get( + ARTIFACTS_PATH, params={"repository_version": repo["latest_version_href"]} + ) + self.assertEqual(len(artifacts), FILE_FIXTURE_COUNT, artifacts) + + +class DeleteRepoVersionResourcesTestCase(unittest.TestCase): + """Test whether removing a repository version affects related resources. + + Test whether removing a repository version will remove a related Publication. + Test whether removing a repository version a Distribution will not be removed. + + This test targets the following issue: + + `Pulp #5303 `_ + """ + + @classmethod + def setUpClass(cls): + """Create class-wide variables.""" + cls.cfg = config.get_config() + cls.client = api.Client(cls.cfg) + + def test_delete_publication(self): + """Publication is removed once the repository version is removed.""" + repo = self.create_sync_repo(2) + version_href = self.client.get(repo["versions_href"])[0]["pulp_href"] + publication = create_file_publication(self.cfg, repo, version_href) + + # delete repo version used to create publication + self.client.delete(version_href) + + with self.assertRaises(HTTPError) as ctx: + self.client.get(publication["pulp_href"]) + + for key in ("not", "found"): + self.assertIn( + key, ctx.exception.response.json()["detail"].lower(), ctx.exception.response + ) + + def test_delete_distribution(self): + """Distribution is not removed once repository version is removed.""" + repo = self.create_sync_repo(2) + version_href = self.client.get(repo["versions_href"])[0]["pulp_href"] + 
publication = create_file_publication(self.cfg, repo, version_href) + + distribution = self.client.post( + FILE_DISTRIBUTION_PATH, gen_distribution(publication=publication["pulp_href"]) + ) + self.addCleanup(self.client.delete, distribution["pulp_href"]) + + # delete repo version used to create publication + self.client.delete(version_href) + + updated_distribution = self.client.get(distribution["pulp_href"]) + self.assertIsNone(updated_distribution["publication"], updated_distribution) + + def create_sync_repo(self, number_syncs=1): + """Create and sync a repository. + + Given the number of times to be synced. + """ + repo = self.client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(self.client.delete, repo["pulp_href"]) + + remote = self.client.post(FILE_REMOTE_PATH, gen_file_remote()) + self.addCleanup(self.client.delete, remote["pulp_href"]) + + for _ in range(number_syncs): + sync(self.cfg, remote, repo) + return self.client.get(repo["pulp_href"]) + + +class ClearAllUnitsRepoVersionTestCase(unittest.TestCase): + """Test clear of all units of a given repository version. + + This test targets the following issue: + + `Pulp #4956 `_ + """ + + @classmethod + def setUpClass(cls): + """Add content to Pulp.""" + cls.cfg = config.get_config() + cls.client = api.Client(cls.cfg) + # Populate Pulp to create content units. 
+ populate_pulp(cls.cfg, url=FILE_LARGE_FIXTURE_MANIFEST_URL) + cls.content = sample(cls.client.get(FILE_CONTENT_PATH), 10) + + def setUp(self): + """Create and sync a repository.""" + self.repo = self.client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(self.client.delete, self.repo["pulp_href"]) + remote = self.client.post(FILE_REMOTE_PATH, gen_file_remote()) + self.addCleanup(self.client.delete, remote["pulp_href"]) + sync(self.cfg, remote, self.repo) + self.repo = self.client.get(self.repo["pulp_href"]) + + def test_add_and_clear_all_units(self): + """Test addition and removal of all units for a given repository version.""" + content = choice(self.content) + modify_repo(self.cfg, self.repo, add_units=[content], remove_units=["*"]) + self.repo = self.client.get(self.repo["pulp_href"]) + + added_content = get_content(self.repo)[FILE_CONTENT_NAME] + self.assertEqual(len(added_content), 1, added_content) + + self.assertEqual(remove_created_key(content), remove_created_key(added_content[0])) + + def test_clear_all_units_using_base_version(self): + """Test clear all units using base version.""" + for content in self.content: + modify_repo(self.cfg, self.repo, add_units=[content]) + + self.repo = self.client.get(self.repo["pulp_href"]) + base_version = get_versions(self.repo)[0]["pulp_href"] + + modify_repo(self.cfg, self.repo, base_version=base_version, remove_units=["*"]) + self.repo = self.client.get(self.repo["pulp_href"]) + + content_last_version = get_content(self.repo)[FILE_CONTENT_NAME] + self.assertEqual(len(content_last_version), 0, content_last_version) + + def test_clear_all_units(self): + """Test clear all units of a given repository version.""" + added_content = sorted( + [content["pulp_href"] for content in get_content(self.repo)[FILE_CONTENT_NAME]] + ) + + modify_repo(self.cfg, self.repo, remove_units=["*"]) + self.repo = self.client.get(self.repo["pulp_href"]) + removed_content = sorted( + [content["pulp_href"] for content in 
get_removed_content(self.repo)[FILE_CONTENT_NAME]] + ) + self.assertEqual(added_content, removed_content) + content = get_content(self.repo)[FILE_CONTENT_NAME] + self.assertEqual(len(content), 0, content) + + def test_http_error(self): + """Test http error is raised.""" + added_content = choice(get_added_content(self.repo)[FILE_CONTENT_NAME]) + with self.assertRaises(HTTPError) as ctx: + self.client.post( + self.repo["pulp_href"] + "modify/", + {"remove_content_units": ["*", added_content["pulp_href"]]}, + ) + for key in ("content", "units", "*"): + self.assertIn( + key, + ctx.exception.response.json()["remove_content_units"][0].lower(), + ctx.exception.response, + ) + + +class BaseVersionTestCase(unittest.TestCase): + """Associate different Content units with the same ``relative_path`` in one RepositoryVersion. + + This test targets the following issues: + + * `Pulp #4028 `_ + """ + + @classmethod + def setUpClass(cls): + """Create class-wide variables.""" + cls.cfg = config.get_config() + cls.client = api.Client(cls.cfg, api.json_handler) + + @classmethod + def tearDownClass(cls): + """Clean created resources.""" + delete_orphans() + + def test_add_content_with_base_version(self): + """Test modify repository with base_version""" + delete_orphans() + + repo = self.client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(self.client.delete, repo["pulp_href"]) + + files = {"file": utils.http_get(FILE_URL)} + artifact = self.client.post(ARTIFACTS_PATH, files=files) + + # create first content unit. 
+ content_attrs = {"artifact": artifact["pulp_href"], "relative_path": utils.uuid4()} + content = self.client.using_handler(api.task_handler).post(FILE_CONTENT_PATH, content_attrs) + repo_version = modify_repo(self.cfg, repo, add_units=[content]) + repo = self.client.get(repo["pulp_href"]) + + self.assertEqual(get_content(repo)[FILE_CONTENT_NAME][0], content) + + files = {"file": utils.http_get(FILE2_URL)} + artifact = self.client.post(ARTIFACTS_PATH, files=files) + + # create second content unit. + second_content_attrs = { + "artifact": artifact["pulp_href"], + "relative_path": content_attrs["relative_path"], + } + content2 = self.client.using_handler(api.task_handler).post( + FILE_CONTENT_PATH, second_content_attrs + ) + modify_repo(self.cfg, repo, add_units=[content2]) + repo = self.client.get(repo["pulp_href"]) + + self.assertEqual(get_content(repo)[FILE_CONTENT_NAME][0], content2) + + modify_repo(self.cfg, repo, base_version=repo_version["pulp_href"], add_units=[content2]) + repo = self.client.get(repo["pulp_href"]) + + self.assertEqual(get_content(repo)[FILE_CONTENT_NAME][0], content2) + + +class RepoVersionRetentionTestCase(unittest.TestCase): + """Test retain_repo_versions for repositories + + This test targets the following issues: + + * `Pulp #8368 `_ + """ + + @classmethod + def setUp(self): + """Add content to Pulp.""" + self.cfg = config.get_config() + self.client = api.Client(self.cfg, api.json_handler) + self.core_client = CoreApiClient(configuration=self.cfg.get_bindings_config()) + self.file_client = gen_file_client() + + self.content_api = ContentFilesApi(self.file_client) + self.repo_api = RepositoriesFileApi(self.file_client) + self.version_api = RepositoriesFileVersionsApi(self.file_client) + self.distro_api = DistributionsFileApi(self.file_client) + self.publication_api = PublicationsFileApi(self.file_client) + + delete_orphans() + populate_pulp(self.cfg, url=FILE_LARGE_FIXTURE_MANIFEST_URL) + self.content = 
sample(self.content_api.list().results, 3) + self.publications = [] + + def _create_repo_versions(self, repo_attributes={}): + self.repo = self.repo_api.create(gen_repo(**repo_attributes)) + self.addCleanup(self.repo_api.delete, self.repo.pulp_href) + + if "autopublish" in repo_attributes and repo_attributes["autopublish"]: + self.distro = create_distribution(repository_href=self.repo.pulp_href) + self.addCleanup(self.distro_api.delete, self.distro.pulp_href) + + for content in self.content: + result = self.repo_api.modify( + self.repo.pulp_href, {"add_content_units": [content.pulp_href]} + ) + monitor_task(result.task) + self.repo = self.repo_api.read(self.repo.pulp_href) + self.publications += self.publication_api.list( + repository_version=self.repo.latest_version_href + ).results + + def test_retain_repo_versions(self): + """Test repo version retention.""" + self._create_repo_versions({"retain_repo_versions": 1}) + + versions = self.version_api.list(file_file_repository_href=self.repo.pulp_href).results + self.assertEqual(len(versions), 1) + + latest_version = self.version_api.read( + file_file_repository_version_href=self.repo.latest_version_href + ) + self.assertEqual(latest_version.number, 3) + self.assertEqual(latest_version.content_summary.present["file.file"]["count"], 3) + self.assertEqual(latest_version.content_summary.added["file.file"]["count"], 3) + + def test_retain_repo_versions_on_update(self): + """Test repo version retention when retain_repo_versions is set.""" + self._create_repo_versions() + + versions = self.version_api.list(file_file_repository_href=self.repo.pulp_href).results + self.assertEqual(len(versions), 4) + + # update retain_repo_versions to 2 + result = self.repo_api.partial_update(self.repo.pulp_href, {"retain_repo_versions": 2}) + monitor_task(result.task) + + versions = self.version_api.list(file_file_repository_href=self.repo.pulp_href).results + self.assertEqual(len(versions), 2) + + latest_version = self.version_api.read( + 
+ file_file_repository_version_href=self.repo.latest_version_href + ) + self.assertEqual(latest_version.number, 3) + self.assertEqual(latest_version.content_summary.present["file.file"]["count"], 3) + self.assertEqual(latest_version.content_summary.added["file.file"]["count"], 1) + + def test_autodistribute(self): + """Test repo version retention with autopublish/autodistribute.""" + self._create_repo_versions({"retain_repo_versions": 1, "autopublish": True}) + + # all but the last publication should be gone + for publication in self.publications[:-1]: + with self.assertRaises(ApiException) as ae: + self.publication_api.read(publication.pulp_href) + self.assertEqual(404, ae.exception.status) + + # check that the last publication is distributed + manifest = download_content_unit(self.cfg, self.distro.to_dict(), "PULP_MANIFEST") + self.assertEqual(manifest.decode("utf-8").count("\n"), len(self.content)) + + +class ContentInRepositoryVersionViewTestCase(unittest.TestCase): + @classmethod + def setUpClass(cls): + """Create class-wide variables.""" + cls.cfg = config.get_config() + cls.file_client = gen_file_client() + cls.remote_api = RemotesFileApi(cls.file_client) + cls.repo_api = RepositoriesFileApi(cls.file_client) + cls.repo_ver_api = RepositoryVersionsApi(cls.file_client) + + @classmethod + def tearDownClass(cls): + delete_orphans() + + def test_all(self): + """Sync two repositories and check view filter.""" + # Test that the content doesn't exist. 
+ non_existant_content_href = ( + "/pulp/api/v3/content/file/files/c4ed74cf-a806-490d-a25f-94c3c3dd2dd7/" + ) + + with self.assertRaises(ApiException) as ctx: + self.repo_ver_api.list(content=non_existant_content_href) + + self.assertEqual(ctx.exception.status, 400) + + initial_rv_count = self.repo_ver_api.list(limit=1).count + + repo = self.repo_api.create(gen_repo()) + self.addCleanup(self.repo_api.delete, repo.pulp_href) + + repo_second = self.repo_api.create(gen_repo()) + self.addCleanup(self.repo_api.delete, repo_second.pulp_href) + + remote = self.remote_api.create(gen_file_remote()) + self.addCleanup(self.remote_api.delete, remote.pulp_href) + + body = gen_file_remote(url=FILE2_FIXTURE_MANIFEST_URL) + remote_second = self.remote_api.create(body) + self.addCleanup(self.remote_api.delete, remote_second.pulp_href) + + repo_sync_data = RepositorySyncURL(remote=remote.pulp_href) + repo_sync_data_second = RepositorySyncURL(remote=remote_second.pulp_href) + + sync_response = self.repo_api.sync(repo.pulp_href, repo_sync_data) + monitor_task(sync_response.task) + + sync_response_second = self.repo_api.sync(repo_second.pulp_href, repo_sync_data_second) + monitor_task(sync_response_second.task) + + # Update repository data and get one content unit from first repository. + repo = self.repo_api.read(repo.pulp_href) + content_href = get_content(repo.to_dict())[FILE_CONTENT_NAME][0]["pulp_href"] + + rv_total = len(self.repo_ver_api.list().to_dict()["results"]) + rv_search = self.repo_ver_api.list(content=content_href).to_dict()["results"] + + # Test that only one repository version has the selected content. + self.assertEqual(len(rv_search), 1) + # Test that the repository version with the content matches. + self.assertEqual(rv_search[0]["pulp_href"], repo.latest_version_href) + # Test total number of repository versions. Two for each repository. 
+ self.assertEqual(rv_total - initial_rv_count, 4) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_sync.py b/pulp_file/tests/functional/api/from_pulpcore/test_sync.py new file mode 100644 index 000000000..38c35f268 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_sync.py @@ -0,0 +1,186 @@ +import uuid + +from pulp_smash.pulp3.bindings import monitor_task + +from pulpcore.client.pulp_file import ( + RepositorySyncURL, +) + + +def _run_basic_sync_and_assert( + remote, file_repo, file_repo_api_client, file_content_api_client, policy="on_demand" +): + body = RepositorySyncURL(remote=remote.pulp_href) + monitor_task(file_repo_api_client.sync(file_repo.pulp_href, body).task) + + # Check content is present, but no artifacts are there + content_response = file_content_api_client.list( + repository_version=f"{file_repo.versions_href}1/" + ) + assert content_response.count == 3 + for content in content_response.results: + if policy == "immediate": + assert content.artifact is not None + else: + assert content.artifact is None + + +def test_http_sync_no_ssl( + delete_orphans_pre, + file_fixture_gen_remote, + file_repo, + file_repo_api_client, + file_content_api_client, +): + """ + Test file on_demand sync with plain http:// + """ + remote_on_demand = file_fixture_gen_remote(fixture_name="basic", policy="on_demand") + + _run_basic_sync_and_assert( + remote_on_demand, file_repo, file_repo_api_client, file_content_api_client + ) + + +def test_http_sync_ssl_tls_validation_off( + delete_orphans_pre, + file_fixture_gen_remote_ssl, + file_repo, + file_repo_api_client, + file_content_api_client, +): + """ + Test file on_demand sync with https:// serving from an untrusted certificate. 
+ """ + remote_on_demand = file_fixture_gen_remote_ssl( + fixture_name="basic", policy="on_demand", tls_validation="false" + ) + + _run_basic_sync_and_assert( + remote_on_demand, file_repo, file_repo_api_client, file_content_api_client + ) + + +def test_http_sync_ssl_tls_validation_on( + delete_orphans_pre, + file_fixture_gen_remote_ssl, + file_repo, + file_repo_api_client, + file_content_api_client, +): + """ + Test file on_demand sync with https:// and a client connection configured to trust it. + """ + remote_on_demand = file_fixture_gen_remote_ssl( + fixture_name="basic", policy="on_demand", tls_validation="true" + ) + + _run_basic_sync_and_assert( + remote_on_demand, file_repo, file_repo_api_client, file_content_api_client + ) + + +def test_http_sync_ssl_tls_validation_defaults_to_on( + delete_orphans_pre, + file_fixture_gen_remote_ssl, + file_repo, + file_repo_api_client, + file_content_api_client, +): + """ + Test file on_demand sync with https:// and that tls validation is on by default. + """ + + remote_on_demand = file_fixture_gen_remote_ssl(fixture_name="basic", policy="on_demand") + + _run_basic_sync_and_assert( + remote_on_demand, file_repo, file_repo_api_client, file_content_api_client + ) + + +def test_http_sync_ssl_with_client_cert_req( + delete_orphans_pre, + file_fixture_gen_remote_client_cert_req, + file_repo, + file_repo_api_client, + file_content_api_client, +): + """ + Test file on_demand sync with https:// and mutual authentication between client and server. + """ + remote_on_demand = file_fixture_gen_remote_client_cert_req( + fixture_name="basic", policy="on_demand" + ) + + _run_basic_sync_and_assert( + remote_on_demand, file_repo, file_repo_api_client, file_content_api_client + ) + + +def test_ondemand_to_immediate_sync( + delete_orphans_pre, + file_fixture_gen_remote_ssl, + file_repo, + file_repo_api_client, + file_content_api_client, +): + """ + Test file on_demand sync does not bring in Artifacts, but a later sync with "immediate" will. 
+ """ + remote_on_demand = file_fixture_gen_remote_ssl(fixture_name="basic", policy="on_demand") + + _run_basic_sync_and_assert( + remote_on_demand, + file_repo, + file_repo_api_client, + file_content_api_client, + ) + + remote_immediate = file_fixture_gen_remote_ssl(fixture_name="basic", policy="immediate") + + _run_basic_sync_and_assert( + remote_immediate, + file_repo, + file_repo_api_client, + file_content_api_client, + policy="immediate", + ) + + +def test_header_for_sync( + delete_orphans_pre, + file_fixture_server_ssl, + tls_certificate_authority_cert, + file_remote_api_client, + file_repo, + file_repo_api_client, + file_content_api_client, + gen_object_with_cleanup, +): + """ + Test file sync will correctly submit header data during download when configured. + """ + requests_record = file_fixture_server_ssl.requests_record + url = file_fixture_server_ssl.make_url("/basic/PULP_MANIFEST") + + header_name = "X-SOME-HEADER" + header_value = str(uuid.uuid4()) + headers = [{header_name: header_value}] + + remote_on_demand_data = { + "url": str(url), + "policy": "on_demand", + "name": str(uuid.uuid4()), + "ca_cert": tls_certificate_authority_cert, + "headers": headers, + } + remote_on_demand = gen_object_with_cleanup(file_remote_api_client, remote_on_demand_data) + + _run_basic_sync_and_assert( + remote_on_demand, file_repo, file_repo_api_client, file_content_api_client + ) + + assert len(requests_record) == 1 + assert requests_record[0].path == "/basic/PULP_MANIFEST" + assert header_name in requests_record[0].headers + assert header_value == requests_record[0].headers[header_name] diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_tasking.py b/pulp_file/tests/functional/api/from_pulpcore/test_tasking.py new file mode 100644 index 000000000..a9fadc348 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_tasking.py @@ -0,0 +1,134 @@ +"""Tests related to the tasking system.""" +import unittest +from urllib.parse import urljoin + +from 
pulp_smash import api, config, utils +from pulp_smash.pulp3.bindings import delete_orphans +from pulp_smash.pulp3.constants import TASKS_PATH +from pulp_smash.pulp3.utils import gen_remote, gen_repo, get_content_summary, sync +from requests.exceptions import HTTPError + +from pulp_file.tests.functional.utils import gen_file_remote +from .constants import ( + FILE_FIXTURE_MANIFEST_URL, + FILE_FIXTURE_SUMMARY, + FILE_LARGE_FIXTURE_MANIFEST_URL, + FILE_REMOTE_PATH, + FILE_REPO_PATH, +) + + +class MultiResourceLockingTestCase(unittest.TestCase): + """Verify multi-resourcing locking. + + This test targets the following issues: + + * `Pulp #3186 `_ + * `Pulp Smash #879 `_ + """ + + def test_all(self): + """Verify multi-resourcing locking. + + Do the following: + + 1. Create a repository, and a remote. + 2. Update the remote to point to a different url. + 3. Immediately run a sync. The sync should fire after the update and + sync from the second url. + 4. Assert that remote url was updated. + 5. Assert that the number of units present in the repository is + according to the updated url. + """ + cfg = config.get_config() + client = api.Client(cfg, api.json_handler) + + repo = client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(client.delete, repo["pulp_href"]) + + body = gen_file_remote(url=FILE_LARGE_FIXTURE_MANIFEST_URL) + remote = client.post(FILE_REMOTE_PATH, body) + self.addCleanup(client.delete, remote["pulp_href"]) + + url = {"url": FILE_FIXTURE_MANIFEST_URL} + client.patch(remote["pulp_href"], url) + + sync(cfg, remote, repo) + + repo = client.get(repo["pulp_href"]) + remote = client.get(remote["pulp_href"]) + self.assertEqual(remote["url"], url["url"]) + self.assertDictEqual(get_content_summary(repo), FILE_FIXTURE_SUMMARY) + + +class CancelTaskTestCase(unittest.TestCase): + """Test to cancel a task in different states. 
+ + This test targets the following issue: + + * `Pulp #3527 `_ + * `Pulp #3634 `_ + * `Pulp Smash #976 `_ + """ + + @classmethod + def setUpClass(cls): + """Create class-wide variables.""" + cls.cfg = config.get_config() + cls.client = api.Client(cls.cfg, api.page_handler) + + def test_cancel_running_task(self): + """Cancel a running task.""" + task = self.create_long_task() + response = self.cancel_task(task) + self.assertIsNone(response["finished_at"], response) + self.assertIn(response["state"], ["canceling", "canceled"], response) + + def test_cancel_nonexistent_task(self): + """Cancel a nonexistent task.""" + task_href = urljoin(TASKS_PATH, utils.uuid4() + "/") + with self.assertRaises(HTTPError) as ctx: + self.client.patch(task_href, json={"state": "canceled"}) + for key in ("not", "found"): + self.assertIn( + key, ctx.exception.response.json()["detail"].lower(), ctx.exception.response + ) + + def test_cancel_finished_task(self): + repo = self.client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(self.client.delete, repo["pulp_href"]) + repo["name"] = utils.uuid4() + task_href = self.client.patch(repo["pulp_href"], json=repo) + with self.assertRaises(HTTPError) as ctx: + self.cancel_task(task_href) + self.assertEqual(ctx.exception.response.status_code, 409) + self.assertEqual(ctx.exception.response.json()["state"], "completed") + + def test_delete_running_task(self): + """Delete a running task.""" + task = self.create_long_task() + with self.assertRaises(HTTPError): + self.client.delete(task["task"]) + + def create_long_task(self): + """Create a long task. Sync a repository with large files.""" + # to force the download of files. 
+ delete_orphans() + + repo = self.client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(self.client.delete, repo["pulp_href"]) + + body = gen_remote(url=FILE_LARGE_FIXTURE_MANIFEST_URL) + remote = self.client.post(FILE_REMOTE_PATH, body) + self.addCleanup(self.client.delete, remote["pulp_href"]) + + # use code_handler to avoid wait to the task to be completed. + return ( + self.client.using_handler(api.code_handler) + .post(urljoin(repo["pulp_href"], "sync/"), {"remote": remote["pulp_href"]}) + .json() + ) + + def cancel_task(self, task): + """Cancel a task.""" + return self.client.patch(task["task"], json={"state": "canceled"}) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_tasks.py b/pulp_file/tests/functional/api/from_pulpcore/test_tasks.py new file mode 100644 index 000000000..268fd9b4d --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_tasks.py @@ -0,0 +1,225 @@ +"""Test that operations can be performed over tasks.""" +import unittest + +from pulp_smash import api, config, utils +from pulp_smash.pulp3.constants import ( + BASE_DISTRIBUTION_PATH, + P3_TASK_END_STATES, + TASKS_PATH, +) +from pulp_smash.pulp3.utils import gen_repo, gen_distribution, get_content, modify_repo, sync +from requests import HTTPError + +from pulp_file.tests.functional.utils import gen_file_remote, skip_if +from .constants import ( + FILE_CONTENT_NAME, + FILE_REMOTE_PATH, + FILE_REPO_PATH, +) + + +_DYNAMIC_TASKS_ATTRS = ("finished_at",) +"""Task attributes that are dynamically set by Pulp, not set by a user.""" + + +class TasksTestCase(unittest.TestCase): + """Perform different operation over tasks. 
+ + This test targets the following issues: + + * `Pulp #3144 `_ + * `Pulp #3527 `_ + * `Pulp Smash #754 `_ + """ + + @classmethod + def setUpClass(cls): + """Create class-wide variables.""" + cls.client = api.Client(config.get_config(), api.json_handler) + cls.task = {} + + def test_01_create_task(self): + """Create a task.""" + repo = self.client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(self.client.delete, repo["pulp_href"]) + attrs = {"description": utils.uuid4()} + response = self.client.patch(repo["pulp_href"], attrs) + self.task.update(self.client.get(response["task"])) + + @skip_if(bool, "task", False) + def test_02_read_href(self): + """Read a task by its pulp_href.""" + task = self.client.get(self.task["pulp_href"]) + for key, val in self.task.items(): + if key in _DYNAMIC_TASKS_ATTRS: + continue + with self.subTest(key=key): + self.assertEqual(task[key], val, task) + + @skip_if(bool, "task", False) + def test_02_read_href_with_specific_fields(self): + """Read a task by its pulp_href providing specific fields.""" + fields = ("pulp_href", "state", "worker") + task = self.client.get(self.task["pulp_href"], params={"fields": ",".join(fields)}) + self.assertEqual(sorted(fields), sorted(task.keys())) + + @skip_if(bool, "task", False) + def test_02_read_task_without_specific_fields(self): + """Read a task by its href excluding specific fields.""" + # requests doesn't allow the use of != in parameters. 
+ url = "{}?exclude_fields=state".format(self.task["pulp_href"]) + task = self.client.get(url) + self.assertNotIn("state", task.keys()) + + @skip_if(bool, "task", False) + def test_02_read_task_with_minimal_fields(self): + """Read a task by its href filtering minimal fields.""" + task = self.client.get(self.task["pulp_href"], params={"minimal": True}) + response_fields = task.keys() + self.assertNotIn("progress_reports", response_fields) + self.assertNotIn("parent_task", response_fields) + self.assertNotIn("error", response_fields) + + @skip_if(bool, "task", False) + def test_02_read_invalid_worker(self): + """Read a task using an invalid worker name.""" + with self.assertRaises(HTTPError): + self.filter_tasks({"worker": utils.uuid4()}) + + @skip_if(bool, "task", False) + def test_02_read_valid_worker(self): + """Read a task using a valid worker name.""" + page = self.filter_tasks({"worker": self.task["worker"]}) + self.assertNotEqual(len(page["results"]), 0, page["results"]) + + def test_02_read_invalid_date(self): + """Read a task by an invalid date.""" + with self.assertRaises(HTTPError): + self.filter_tasks({"finished_at": utils.uuid4(), "started_at": utils.uuid4()}) + + @skip_if(bool, "task", False) + def test_02_read_valid_date(self): + """Read a task by a valid date.""" + page = self.filter_tasks({"started_at": self.task["started_at"]}) + self.assertGreaterEqual(len(page["results"]), 1, page["results"]) + + @skip_if(bool, "task", False) + def test_02_search_task_by_name(self): + """Search Task by its name. + + This test targets the following issue: + + * `Pulp #4230 `_ + + Do the following: + + 1. Assert that task has a field name, and that this field is not empty. + 2. Filter the tasks by name. + 3. Assert the created task is included on the search results. 
+ """ + # step 1 + self.assertIsNotNone(self.task.get("name")) + # step 2 + search_results = self.filter_tasks({"name": self.task["name"]}) + # step 3 + self.assertIn(self.task, search_results["results"]) + + def test_02_search_by_invalid_name(self): + """Search passing invalid name and assert nothing is returned.""" + search_results = self.filter_tasks({"name": "this-is-not-a-task-name"}) + self.assertEqual(search_results["count"], 0) + self.assertEqual(len(search_results["results"]), 0) + + @skip_if(bool, "task", False) + def test_03_delete_tasks(self): + """Delete a task.""" + # If this assertion fails, then either Pulp's tasking system or Pulp + # Smash's code for interacting with the tasking system has a bug. + self.assertIn(self.task["state"], P3_TASK_END_STATES) + self.client.delete(self.task["pulp_href"]) + with self.assertRaises(HTTPError): + self.client.get(self.task["pulp_href"]) + + def filter_tasks(self, criteria): + """Filter tasks based on the provided criteria.""" + return self.client.get(TASKS_PATH, params=criteria) + + +class FilterTaskCreatedResourcesTestCase(unittest.TestCase): + """Perform filtering over the task's field created_resources. 
+ + This test targets the following issue: + + * `Pulp #5180 `_ + """ + + def test_read_fields_created_resources_only(self): + """Read created resources from the requested fields.""" + client = api.Client(config.get_config(), api.page_handler) + distribution_path = "{}file/file/".format(BASE_DISTRIBUTION_PATH) + response = client.post(distribution_path, gen_distribution()) + + task = client.get(response["task"]) + self.addCleanup(client.delete, task["created_resources"][0]) + + filtered_task = client.get(task["pulp_href"], params={"fields": "created_resources"}) + + self.assertEqual(len(filtered_task), 1, filtered_task) + + self.assertEqual( + task["created_resources"], filtered_task["created_resources"], filtered_task + ) + + +class FilterTaskResourcesTestCase(unittest.TestCase): + """Perform filtering of reserved resources and the contents of created resources.""" + + @classmethod + def setUpClass(cls): + cls.cfg = config.get_config() + cls.client = api.Client(cls.cfg, api.page_handler) + + def setUp(self): + self.remote = self.client.post(FILE_REMOTE_PATH, gen_file_remote()) + self.repository = self.client.post(FILE_REPO_PATH, gen_repo()) + response = sync(self.cfg, self.remote, self.repository) + self.created_repo_version = response["pulp_href"] + self.repository = self.client.get(self.repository["pulp_href"]) + for file_content in get_content(self.repository)[FILE_CONTENT_NAME]: + modify_repo(self.cfg, self.repository, remove_units=[file_content]) + attrs = {"description": utils.uuid4()} + response = self.client.patch(self.repository["pulp_href"], attrs) + self.repo_update_task = self.client.get(response["task"]) + + def tearDown(self): + self.client.delete(self.repository["pulp_href"]) + self.client.delete(self.remote["pulp_href"]) + self.client.delete(self.repo_update_task["pulp_href"]) + + def test_01_filter_tasks_by_reserved_resources(self): + """Filter all tasks by a particular reserved resource.""" + filter_params = { + "reserved_resources_record": 
self.repo_update_task["reserved_resources_record"][0] + } + results = self.client.get(TASKS_PATH, params=filter_params) + self.assertEqual(len(results), 5, results) + self.assertEqual(self.repo_update_task, results[0], results) + + def test_02_filter_tasks_by_non_existing_resources(self): + """Filter all tasks by a non-existing reserved resource.""" + filter_params = {"reserved_resources_record": "a_resource_should_be_never_named_like_this"} + with self.assertRaises(HTTPError): + self.client.get(TASKS_PATH, params=filter_params) + + def test_03_filter_tasks_by_created_resources(self): + """Filter all tasks by a particular created resource.""" + filter_params = {"created_resources": self.created_repo_version} + results = self.client.get(TASKS_PATH, params=filter_params) + self.assertEqual(len(results), 1, results) + self.assertEqual([self.created_repo_version], results[0]["created_resources"], results) + + def test_04_filter_tasks_by_non_existing_resources(self): + """Filter all tasks by a non-existing reserved resource.""" + filter_params = {"created_resources": "a_resource_should_be_never_named_like_this"} + with self.assertRaises(HTTPError): + self.client.get(TASKS_PATH, params=filter_params) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_unlinking_repo.py b/pulp_file/tests/functional/api/from_pulpcore/test_unlinking_repo.py new file mode 100644 index 000000000..e9855fa95 --- /dev/null +++ b/pulp_file/tests/functional/api/from_pulpcore/test_unlinking_repo.py @@ -0,0 +1,59 @@ +"""Tests that perform action over remotes""" + +import unittest + +from pulp_smash import api, config +from pulp_smash.pulp3.utils import gen_repo, get_content, sync + +from pulp_file.tests.functional.utils import gen_file_remote +from .constants import ( + FILE_CONTENT_NAME, + FILE_REMOTE_PATH, + FILE_REPO_PATH, +) + + +class RemotesTestCase(unittest.TestCase): + """Verify remotes can be used with different repos.""" + + def test_all(self): + """Verify remotes can be used 
with different repos. + + This test explores the design choice stated in `Pulp #3341`_ that + remove the FK from remotes to repository. + Allowing remotes to be used with different + repositories. + + .. _Pulp #3341: https://pulp.plan.io/issues/3341 + + Do the following: + + 1. Create a remote. + 2. Create 2 repositories. + 3. Sync both repositories using the same remote. + 4. Assert that the two repositories have the same contents. + """ + cfg = config.get_config() + + # Create a remote. + client = api.Client(cfg, api.json_handler) + body = gen_file_remote() + remote = client.post(FILE_REMOTE_PATH, body) + self.addCleanup(client.delete, remote["pulp_href"]) + + # Create and sync repos. + repos = [] + for _ in range(2): + repo = client.post(FILE_REPO_PATH, gen_repo()) + self.addCleanup(client.delete, repo["pulp_href"]) + sync(cfg, remote, repo) + repos.append(client.get(repo["pulp_href"])) + + # Compare contents of repositories. + contents = [] + for repo in repos: + contents.append(get_content(repo)[FILE_CONTENT_NAME]) + self.assertEqual( + {content["pulp_href"] for content in contents[0]}, + {content["pulp_href"] for content in contents[1]}, + ) diff --git a/pulp_file/tests/functional/api/test_acs.py b/pulp_file/tests/functional/api/test_acs.py index 4a013e5ac..7a448b340 100644 --- a/pulp_file/tests/functional/api/test_acs.py +++ b/pulp_file/tests/functional/api/test_acs.py @@ -20,11 +20,6 @@ RepositorySyncURL, ) from pulpcore.client.pulp_file.exceptions import ApiException -from pulpcore.tests.functional.api.using_plugin.utils import ( - gen_file_client, - gen_file_remote, - gen_repo, -) from pulp_file.tests.functional.constants import ( FILE_FIXTURE_MANIFEST_URL, @@ -32,7 +27,12 @@ FILE_MANIFEST_ONLY_FIXTURE_URL, PULP_FIXTURES_BASE_URL, ) -from pulp_file.tests.functional.utils import get_file_content_paths +from pulp_file.tests.functional.utils import ( + gen_file_client, + gen_file_remote, + gen_repo, + get_file_content_paths, +) class 
AlternateContentSourceTestCase(unittest.TestCase): diff --git a/pulp_file/tests/functional/api/test_auto_publish.py b/pulp_file/tests/functional/api/test_auto_publish.py index 47d0e7184..ca60bee61 100644 --- a/pulp_file/tests/functional/api/test_auto_publish.py +++ b/pulp_file/tests/functional/api/test_auto_publish.py @@ -7,7 +7,6 @@ from pulp_smash.pulp3.utils import delete_version, download_content_unit, gen_repo from pulp_file.tests.functional.utils import gen_file_client, gen_file_remote -from pulp_file.tests.functional.utils import set_up_module as setUpModule # noqa:F401 from pulpcore.client.pulp_file import ( ContentFilesApi, diff --git a/pulp_file/tests/functional/api/test_crud_content_unit.py b/pulp_file/tests/functional/api/test_crud_content_unit.py index bfe1ac93f..4e8ded0bc 100644 --- a/pulp_file/tests/functional/api/test_crud_content_unit.py +++ b/pulp_file/tests/functional/api/test_crud_content_unit.py @@ -15,11 +15,11 @@ gen_file_client, gen_file_content_attrs, gen_file_content_upload_attrs, - tasks, + gen_pulpcore_client, skip_if, ) -from pulp_file.tests.functional.utils import set_up_module as setUpModule # noqa:F401 +from pulpcore.client.pulpcore import TasksApi from pulpcore.client.pulp_file import ( ContentFilesApi, RepositoriesFileApi, @@ -233,6 +233,8 @@ def test_non_error(self): In order to avoid an HTTP error, use the same ``artifact`` and different ``relative_path``. """ + tasks_api = TasksApi(gen_pulpcore_client()) + delete_orphans() artifact = gen_artifact() @@ -243,7 +245,7 @@ def test_non_error(self): # create second content unit. 
response = self.file_content_api.create(**gen_file_content_attrs(artifact)) monitor_task(response.task) - task = tasks.read(response.task) + task = tasks_api.read(response.task) self.assertEqual(task.state, "completed") diff --git a/pulp_file/tests/functional/api/test_crud_remotes.py b/pulp_file/tests/functional/api/test_crud_remotes.py index 4e91db2cc..7804e0940 100644 --- a/pulp_file/tests/functional/api/test_crud_remotes.py +++ b/pulp_file/tests/functional/api/test_crud_remotes.py @@ -16,9 +16,7 @@ from pulp_file.tests.functional.utils import ( gen_file_client, gen_file_remote, - skip_if, ) -from pulp_file.tests.functional.utils import set_up_module as setUpModule # noqa:F401 from pulpcore.client.pulp_file import RemotesFileApi from pulpcore.client.pulp_file.exceptions import ApiException diff --git a/pulp_file/tests/functional/api/test_download_content.py b/pulp_file/tests/functional/api/test_download_content.py index b5ec769a7..ee6e7bd3d 100644 --- a/pulp_file/tests/functional/api/test_download_content.py +++ b/pulp_file/tests/functional/api/test_download_content.py @@ -16,7 +16,6 @@ get_file_content_paths, gen_file_remote, ) -from pulp_file.tests.functional.utils import set_up_module as setUpModule # noqa:F401 from pulpcore.client.pulp_file import ( DistributionsFileApi, diff --git a/pulp_file/tests/functional/api/test_download_policies.py b/pulp_file/tests/functional/api/test_download_policies.py index ec6c3aa52..a7183a717 100644 --- a/pulp_file/tests/functional/api/test_download_policies.py +++ b/pulp_file/tests/functional/api/test_download_policies.py @@ -16,12 +16,11 @@ FILE_FIXTURE_SUMMARY, ) from pulp_file.tests.functional.utils import ( - core_client, gen_file_client, gen_file_remote, + gen_pulpcore_client, skip_if, ) -from pulp_file.tests.functional.utils import set_up_module as setUpModule # noqa:F401 from pulpcore.client.pulpcore import ArtifactsApi from pulpcore.client.pulp_file import ( @@ -211,10 +210,11 @@ def test_all(self): # delete orphans to 
assure that no content units are present on the # file system delete_orphans() - client = gen_file_client() + file_client = gen_file_client() + core_client = gen_pulpcore_client() artifacts_api = ArtifactsApi(core_client) - repo_api = RepositoriesFileApi(client) - remote_api = RemotesFileApi(client) + repo_api = RepositoriesFileApi(file_client) + remote_api = RemotesFileApi(file_client) repo = repo_api.create(gen_repo()) self.addCleanup(repo_api.delete, repo.pulp_href) diff --git a/pulp_file/tests/functional/api/test_publish.py b/pulp_file/tests/functional/api/test_publish.py index 95e82b620..3ac7adadd 100644 --- a/pulp_file/tests/functional/api/test_publish.py +++ b/pulp_file/tests/functional/api/test_publish.py @@ -12,7 +12,6 @@ gen_file_client, gen_file_remote, ) -from pulp_file.tests.functional.utils import set_up_module as setUpModule # noqa:F401 from pulpcore.client.pulp_file import ( PublicationsFileApi, diff --git a/pulp_file/tests/functional/api/test_pulp_manifest.py b/pulp_file/tests/functional/api/test_pulp_manifest.py index dc8f61e78..32f4c70f9 100644 --- a/pulp_file/tests/functional/api/test_pulp_manifest.py +++ b/pulp_file/tests/functional/api/test_pulp_manifest.py @@ -10,7 +10,6 @@ from pulp_file.tests.functional.constants import FILE_FIXTURE_COUNT from pulp_file.tests.functional.utils import gen_file_client, gen_file_remote -from pulp_file.tests.functional.utils import set_up_module as setUpModule # noqa:F401 from pulpcore.client.pulp_file import ( DistributionsFileApi, diff --git a/pulp_file/tests/functional/api/test_rbac.py b/pulp_file/tests/functional/api/test_rbac.py index afc0dd4e1..c9b3813e4 100644 --- a/pulp_file/tests/functional/api/test_rbac.py +++ b/pulp_file/tests/functional/api/test_rbac.py @@ -8,9 +8,7 @@ ApiException, ) from pulpcore.client.pulp_file import AsyncOperationResponse -from pulp_file.tests.functional.utils import ( # noqa:F401 - set_up_module as setUpModule, -) + from pulp_file.tests.functional.utils import gen_repo, 
gen_file_remote diff --git a/pulp_file/tests/functional/api/test_sync.py b/pulp_file/tests/functional/api/test_sync.py index 5be451d2c..a003ae15c 100644 --- a/pulp_file/tests/functional/api/test_sync.py +++ b/pulp_file/tests/functional/api/test_sync.py @@ -18,7 +18,6 @@ FILE2_FIXTURE_MANIFEST_URL, ) from pulp_file.tests.functional.utils import gen_file_client, gen_file_remote -from pulp_file.tests.functional.utils import set_up_module as setUpModule # noqa:F401 from pulpcore.client.pulp_file import ( RepositoriesFileApi, diff --git a/pulp_file/tests/functional/conftest.py b/pulp_file/tests/functional/conftest.py index 9aa74395e..498ddd455 100644 --- a/pulp_file/tests/functional/conftest.py +++ b/pulp_file/tests/functional/conftest.py @@ -16,9 +16,10 @@ RemotesFileApi, PublicationsFileApi, ) -from pulpcore.tests.functional.api.using_plugin.utils import gen_file_client from pulp_smash.pulp3.utils import gen_repo +from pulp_file.tests.functional.utils import gen_file_client + _logger = logging.getLogger(__name__) diff --git a/pulp_file/tests/fixtures/basic/1.iso b/pulp_file/tests/functional/fixtures/basic/1.iso similarity index 100% rename from pulp_file/tests/fixtures/basic/1.iso rename to pulp_file/tests/functional/fixtures/basic/1.iso diff --git a/pulp_file/tests/fixtures/basic/2.iso b/pulp_file/tests/functional/fixtures/basic/2.iso similarity index 100% rename from pulp_file/tests/fixtures/basic/2.iso rename to pulp_file/tests/functional/fixtures/basic/2.iso diff --git a/pulp_file/tests/fixtures/basic/3.iso b/pulp_file/tests/functional/fixtures/basic/3.iso similarity index 100% rename from pulp_file/tests/fixtures/basic/3.iso rename to pulp_file/tests/functional/fixtures/basic/3.iso diff --git a/pulp_file/tests/fixtures/basic/PULP_MANIFEST b/pulp_file/tests/functional/fixtures/basic/PULP_MANIFEST similarity index 100% rename from pulp_file/tests/fixtures/basic/PULP_MANIFEST rename to pulp_file/tests/functional/fixtures/basic/PULP_MANIFEST diff --git 
a/pulp_file/tests/functional/utils.py b/pulp_file/tests/functional/utils.py index d1a3f0d68..e306b712d 100644 --- a/pulp_file/tests/functional/utils.py +++ b/pulp_file/tests/functional/utils.py @@ -1,17 +1,18 @@ # coding=utf-8 """Utilities for tests for the file plugin.""" +from datetime import datetime from functools import partial import requests from unittest import SkipTest from tempfile import NamedTemporaryFile -from pulp_smash import api, config, selectors, utils +from pulp_smash import api, cli, config, selectors, utils +from pulp_smash.pulp3.bindings import monitor_task +from pulp_smash.pulp3.constants import STATUS_PATH from pulp_smash.pulp3.utils import ( gen_remote, gen_repo, get_content, - require_pulp_3, - require_pulp_plugins, sync, ) @@ -28,26 +29,18 @@ from pulpcore.client.pulpcore import ( ApiClient as CoreApiClient, ArtifactsApi, - TasksApi, + ExportersPulpApi, + UsersApi, + UsersRolesApi, ) from pulpcore.client.pulp_file import ApiClient as FileApiClient +from pulpcore.client.pulp_file import DistributionsFileApi cfg = config.get_config() configuration = cfg.get_bindings_config() -def set_up_module(): - """Skip tests Pulp 3 isn't under test or if pulp-file isn't installed.""" - require_pulp_3(SkipTest) - require_pulp_plugins({"file"}, SkipTest) - - -def gen_file_client(): - """Return an OBJECT for file client.""" - return FileApiClient(configuration) - - def gen_file_remote(url=FILE_FIXTURE_MANIFEST_URL, **kwargs): """Return a semi-random dict for use in creating a file Remote. @@ -111,6 +104,38 @@ def populate_pulp(cfg, url=FILE_FIXTURE_MANIFEST_URL): return client.get(FILE_CONTENT_PATH)["results"] +skip_if = partial(selectors.skip_if, exc=SkipTest) # pylint:disable=invalid-name +"""The ``@skip_if`` decorator, customized for unittest. + +:func:`pulp_smash.selectors.skip_if` is test runner agnostic. This function is +identical, except that ``exc`` has been set to ``unittest.SkipTest``. 
+""" + + +def gen_artifact(url=FILE_URL, file=None): + """Creates an artifact.""" + core_client = gen_pulpcore_client() + if not file: + response = requests.get(url) + with NamedTemporaryFile() as temp_file: + temp_file.write(response.content) + return ArtifactsApi(core_client).create(file=temp_file.name).to_dict() + + return ArtifactsApi(core_client).create(file=file).to_dict() + + +def gen_pulpcore_client(): + """Return an OBJECT for pulpcore client.""" + configuration = config.get_config().get_bindings_config() + return CoreApiClient(configuration) + + +def gen_file_client(): + """Return an OBJECT for file client.""" + configuration = config.get_config().get_bindings_config() + return FileApiClient(configuration) + + def create_file_publication(cfg, repo, version_href=None): """Create a file publication. @@ -125,30 +150,188 @@ def create_file_publication(cfg, repo, version_href=None): body = {"repository_version": version_href} else: body = {"repository": repo["pulp_href"]} + return api.Client(cfg).post(FILE_PUBLICATION_PATH, body) - client = api.Client(cfg, api.json_handler) - call_report = client.post(FILE_PUBLICATION_PATH, body) - tasks = tuple(api.poll_spawned_tasks(cfg, call_report)) - return client.get(tasks[-1]["created_resources"][0]) +def create_repo_and_versions(syncd_repo, repo_api, versions_api, content_api): + """Create a repo with multiple versions. -skip_if = partial(selectors.skip_if, exc=SkipTest) # pylint:disable=invalid-name -"""The ``@skip_if`` decorator, customized for unittest. - -:func:`pulp_smash.selectors.skip_if` is test runner agnostic. This function is -identical, except that ``exc`` has been set to ``unittest.SkipTest``. -""" + :param syncd_repo: A Repository that has at least three Content-units for us to copy from. 
+ :param pulpcore.client.pulp_file.RepositoriesFileApi repo_api: client to talk to the Repository + API + :param pulpcore.client.pulp_file.RepositoriesFileVersionsApi versions_api: client to talk to + the RepositoryVersions API + :param pulpcore.client.pulp_file.ContentFilesApi content_api: client to talk to the Content API + :returns: A (FileRepository, [FileRepositoryVersion...]) tuple + """ + # Create a new file-repo + a_repo = repo_api.create(gen_repo()) + # get a list of all the files from one of our existing repos + file_list = content_api.list(repository_version=syncd_repo.latest_version_href) + # copy files from repositories[0] into new, one file at a time + results = file_list.results + for a_file in results: + href = a_file.pulp_href + modify_response = repo_api.modify(a_repo.pulp_href, {"add_content_units": [href]}) + monitor_task(modify_response.task) + # get all versions of that repo + versions = versions_api.list(a_repo.pulp_href, ordering="number") + return a_repo, versions + + +def delete_exporter(exporter): + """ + Utility routine to delete an exporter and any exported files + :param exporter : PulpExporter to delete + """ + cfg = config.get_config() + cli_client = cli.Client(cfg) + core_client = CoreApiClient(configuration=cfg.get_bindings_config()) + exporter_api = ExportersPulpApi(core_client) + cmd = ("rm", "-rf", exporter.path) + + cli_client.run(cmd, sudo=True) + result = exporter_api.delete(exporter.pulp_href) + monitor_task(result.task) + + +def create_distribution(repository_href=None): + """Utility to create a pulp_file distribution.""" + file_client = gen_file_client() + distro_api = DistributionsFileApi(file_client) + + body = {"name": utils.uuid4(), "base_path": utils.uuid4()} + if repository_href: + body["repository"] = repository_href + + result = distro_api.create(body) + distro_href = monitor_task(result.task).created_resources[0] + distro = distro_api.read(distro_href) + return distro + + +CREATE_USER_CMD = [ + "from 
django.contrib.auth import get_user_model", + "from django.urls import resolve", + "from guardian.shortcuts import assign_perm", + "", + "user = get_user_model().objects.create(username='{username}')", + "user.set_password('{password}')", + "user.save()", + "", + "for permission in {model_permissions!r}:", + " assign_perm(permission, user)", + "", + "for permission, obj_url in {object_permissions!r}:", + " func, _, kwargs = resolve(obj_url)", + " obj = func.cls.queryset.get(pk=kwargs['pk'])", + " assign_perm(permission, user, obj)", +] + + +DELETE_USER_CMD = [ + "from django.contrib.auth import get_user_model", + "get_user_model().objects.get(username='{username}').delete()", +] + + +def gen_user(cfg=config.get_config(), model_permissions=None, object_permissions=None): + """Create a user with a set of permissions in the pulp database.""" + cli_client = cli.Client(cfg) + + if model_permissions is None: + model_permissions = [] + + if object_permissions is None: + object_permissions = [] + + user = { + "username": utils.uuid4(), + "password": utils.uuid4(), + "model_permissions": model_permissions, + "object_permissions": object_permissions, + } + utils.execute_pulpcore_python( + cli_client, + "\n".join(CREATE_USER_CMD).format(**user), + ) + + api_config = cfg.get_bindings_config() + api_config.username = user["username"] + api_config.password = user["password"] + user["core_api_client"] = CoreApiClient(api_config) + user["api_client"] = FileApiClient(api_config) + user["distribution_api"] = DistributionsFileApi(user["api_client"]) + return user + + +def del_user(user, cfg=config.get_config()): + """Delete a user from the pulp database.""" + cli_client = cli.Client(cfg) + utils.execute_pulpcore_python( + cli_client, + "\n".join(DELETE_USER_CMD).format(**user), + ) + + +def gen_user_rest(cfg=None, model_roles=None, object_roles=None, **kwargs): + """Add a user with a set of roles using the REST API.""" + if cfg is None: + cfg = config.get_config() + api_config = 
cfg.get_bindings_config() + admin_core_client = CoreApiClient(api_config) + admin_user_api = UsersApi(admin_core_client) + admin_user_roles_api = UsersRolesApi(admin_core_client) + + user_body = { + "username": utils.uuid4(), + "password": utils.uuid4(), + } + user_body.update(kwargs) + + user = admin_user_api.create(user_body) + + if model_roles: + for role in model_roles: + user_role = {"role": role, "content_object": None} + admin_user_roles_api.create(user.pulp_href, user_role) + if object_roles: + for role, obj in object_roles: + user_role = {"role": role, "content_object": obj} + admin_user_roles_api.create(user.pulp_href, user_role) + + user_body.update(user.to_dict()) + return user_body + + +def del_user_rest(user_href, cfg=None): + """Delete a user using the REST API.""" + if cfg is None: + cfg = config.get_config() + api_config = cfg.get_bindings_config() + admin_core_client = CoreApiClient(api_config) + admin_user_api = UsersApi(admin_core_client) + + admin_user_api.delete(user_href) + + +def get_redis_status(): + """Return a boolean value which tells whether the connection to redis was established or not.""" + api_client = api.Client(config.get_config(), api.json_handler) + status_response = api_client.get(STATUS_PATH) -core_client = CoreApiClient(configuration) -tasks = TasksApi(core_client) + try: + is_redis_connected = status_response["redis_connection"]["connected"] + except (KeyError, TypeError): + is_redis_connected = False + return is_redis_connected -def gen_artifact(url=FILE_URL, file=None): - """Creates an artifact.""" - if not file: - response = requests.get(url) - with NamedTemporaryFile() as temp_file: - temp_file.write(response.content) - return ArtifactsApi(core_client).create(file=temp_file.name).to_dict() +def parse_date_from_string(s, parse_format="%Y-%m-%dT%H:%M:%S.%fZ"): + """Parse string to datetime object. 
- return ArtifactsApi(core_client).create(file=file).to_dict() + :param s: str like '2018-11-18T21:03:32.493697Z' + :param parse_format: str defaults to %Y-%m-%dT%H:%M:%S.%fZ + :return: datetime.datetime + """ + return datetime.strptime(s, parse_format)