diff --git a/packages/certifi/__init__.py b/packages/certifi/__init__.py index eebdf8886..8ce89cef7 100644 --- a/packages/certifi/__init__.py +++ b/packages/certifi/__init__.py @@ -1,3 +1,4 @@ from .core import contents, where -__version__ = "2021.05.30" +__all__ = ["contents", "where"] +__version__ = "2023.07.22" diff --git a/packages/certifi/cacert.pem b/packages/certifi/cacert.pem index 96e2fc65a..02123695d 100644 --- a/packages/certifi/cacert.pem +++ b/packages/certifi/cacert.pem @@ -28,36 +28,6 @@ DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== -----END CERTIFICATE----- -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Label: "GlobalSign Root CA - R2" -# Serial: 4835703278459682885658125 -# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 -# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe -# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e ------BEGIN CERTIFICATE----- -MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 -MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL -v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 -eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq -tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd -C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa -zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB -mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH -V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n -bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG -3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs -J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO -291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS -ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd -AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 -TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== ------END CERTIFICATE----- - # Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited # Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited # Label: "Entrust.net Premium 2048 Secure Server CA" @@ -491,34 +461,6 @@ vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep +OkuE6N36B9K -----END CERTIFICATE----- -# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. -# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. 
-# Label: "DST Root CA X3" -# Serial: 91299735575339953335919266965803778155 -# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 -# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 -# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 ------BEGIN CERTIFICATE----- -MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ -MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT -DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow -PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD -Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB -AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O -rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq -OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b -xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw -7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD -aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG -SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 -ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr -AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz -R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 -JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo -Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ ------END CERTIFICATE----- - # Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG # Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG # Label: "SwissSign Gold CA - G2" @@ -694,37 +636,6 @@ BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB ZQ== -----END CERTIFICATE----- -# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. 
-# Label: "Network Solutions Certificate Authority" -# Serial: 116697915152937497490437556386812487904 -# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e -# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce -# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c ------BEGIN CERTIFICATE----- -MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi -MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu -MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp -dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV -UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO -ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz -c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP -OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl -mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF -BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 -qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw -gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB -BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu -bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp -dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 -6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ -h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH -/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv -wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN -pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey ------END CERTIFICATE----- - # Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited # Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited # Label: "COMODO ECC Certification Authority" @@ -779,36 +690,6 @@ t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== -----END CERTIFICATE----- -# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc -# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc -# Label: "Cybertrust Global Root" -# Serial: 4835703278459682877484360 -# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 -# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 -# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 ------BEGIN CERTIFICATE----- -MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG -A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh -bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE -ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS -b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 -7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS -J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y -HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP -t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz -FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY -XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ -MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw -hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js -MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA 
-A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj -Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx -XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o -omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc -A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW -WL1WMRJOEcgh4LMRkWXbtKaIOM5V ------END CERTIFICATE----- - # Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority # Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority # Label: "ePKI Root Certification Authority" @@ -910,34 +791,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= -----END CERTIFICATE----- -# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Label: "Hongkong Post Root CA 1" -# Serial: 1000 -# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca -# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 -# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 ------BEGIN CERTIFICATE----- -MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx -FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg -Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG -A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr -b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ -jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn -PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh -ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 -nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h -q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED -MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC -mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 -7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB -oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs -EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO -fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi -AmvZWg== ------END CERTIFICATE----- - # Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. # Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
# Label: "SecureSign RootCA11" @@ -1411,78 +1264,6 @@ t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 -----END CERTIFICATE----- -# Issuer: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes -# Subject: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes -# Label: "EC-ACC" -# Serial: -23701579247955709139626555126524820479 -# MD5 Fingerprint: eb:f5:9d:29:0d:61:f9:42:1f:7c:c2:ba:6d:e3:15:09 -# SHA1 Fingerprint: 28:90:3a:63:5b:52:80:fa:e6:77:4c:0b:6d:a7:d6:ba:a6:4a:f2:e8 -# SHA256 Fingerprint: 88:49:7f:01:60:2f:31:54:24:6a:e2:8c:4d:5a:ef:10:f1:d8:7e:bb:76:62:6f:4a:e0:b7:f9:5b:a7:96:87:99 ------BEGIN CERTIFICATE----- -MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB -8zELMAkGA1UEBhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2Vy -dGlmaWNhY2lvIChOSUYgUS0wODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1 -YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYDVQQLEyxWZWdldSBodHRwczovL3d3 -dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UECxMsSmVyYXJxdWlh -IEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMTBkVD -LUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQG -EwJFUzE7MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8g -KE5JRiBRLTA4MDExNzYtSSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBD -ZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZlZ2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQu -bmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJhcnF1aWEgRW50aXRhdHMg -ZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUNDMIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R -85iKw5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm -4CgPukLjbo73FCeTae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaV -HMf5NLWUhdWZXqBIoH7nF2W4onW4HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNd -QlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0aE9jD2z3Il3rucO2n5nzbcc8t -lGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw0JDnJwIDAQAB -o4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4 -opvpXY0wfwYDVR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBo -dHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidW -ZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAwDQYJKoZIhvcN -AQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJlF7W2u++AVtd0x7Y -/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNaAl6k -SBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhy -Rp/7SNVel+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOS -Agu+TGbrIP65y7WZf+a2E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xl -nJ2lYJU6Un/10asIbvPuW/mIPX64b24D5EI= ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2011" -# Serial: 0 -# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 -# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d -# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 ------BEGIN CERTIFICATE----- -MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix -RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p -YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw -NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK -EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl -cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz -dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ -fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns -bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD -75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP -FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV -HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp -5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu -b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA -A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p -6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 -TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 -dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys -Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI -l7WdmplNsDz4SgCbZN2fOUvRJ9e4 ------END CERTIFICATE----- - # Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 # Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 # Label: "Actalis Authentication Root CA" @@ -1867,50 +1648,6 @@ HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= -----END CERTIFICATE----- -# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi -# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. 
OU=E-Tugra Sertifikasyon Merkezi -# Label: "E-Tugra Certification Authority" -# Serial: 7667447206703254355 -# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 -# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 -# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c ------BEGIN CERTIFICATE----- -MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV -BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC -aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV -BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 -Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz -MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ -BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp -em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN -ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY -B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH -D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF -Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo -q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D -k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH -fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut -dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM -ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 -zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn -rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX -U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 -Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 -XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF -Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR -HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY -GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c -77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 -+GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK -vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 -FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl -yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P -AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD -y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d -NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== ------END CERTIFICATE----- - # Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center # Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center # Label: "T-TeleSec GlobalRoot Class 2" @@ -2342,27 +2079,6 @@ zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= -----END CERTIFICATE----- -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Label: "GlobalSign ECC Root CA - R4" -# Serial: 14367148294922964480859022125800977897474 -# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e -# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb -# SHA256 Fingerprint: 
be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c ------BEGIN CERTIFICATE----- -MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD -QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ -FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F -uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX -kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs -ewv4n4Q= ------END CERTIFICATE----- - # Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 # Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 # Label: "GlobalSign ECC Root CA - R5" @@ -2385,46 +2101,6 @@ KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg xwy8p2Fp8fc74SrL+SvzZpA3 -----END CERTIFICATE----- -# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden -# Label: "Staat der Nederlanden EV Root CA" -# Serial: 10000013 -# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba -# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb -# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a ------BEGIN CERTIFICATE----- -MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y -MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg -TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS -b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS -M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC -UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d -Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p -rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l -pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb -j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC -KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS -/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X -cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH -1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP -px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 -MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI -eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u -2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS -v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC -wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy -CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e -vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 -Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa -Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL -eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 
-FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc -7uzXLg== ------END CERTIFICATE----- - # Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust # Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust # Label: "IdenTrust Commercial Root CA 1" @@ -3032,116 +2708,6 @@ T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== -----END CERTIFICATE----- -# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Label: "TrustCor RootCert CA-1" -# Serial: 15752444095811006489 -# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 -# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a -# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD -VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk -MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U -cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y -IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB -pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h -IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG -A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU -cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB -CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid -RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V -seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme -9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV -EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW -hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ -DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD -ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I -/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf -ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ -yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts -L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN -zl/HHk484IkzlQsPpTLWPFp5LBk= ------END CERTIFICATE----- - -# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority -# Label: "TrustCor RootCert CA-2" -# Serial: 2711694510199101698 -# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 -# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 -# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 ------BEGIN CERTIFICATE----- -MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV -BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw -IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy -dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig -Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk -MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg -Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD -VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy -dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ -QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq -1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp -2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK -DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape -az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF -3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 -oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM -g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 -mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh -8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd -BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U -nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw -DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX -dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ -MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL -/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX -CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa -ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW -2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 -N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 -Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB -As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp -5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu -1uwJ ------END CERTIFICATE----- - -# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority -# Label: "TrustCor ECA-1" -# Serial: 9548242946988625984 -# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c -# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd -# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD -VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk -MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U -cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y -IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV -BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw -IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy -dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig -RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb -3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA -BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 -3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou -owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ -wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF -ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf -BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ -MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv -civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 -AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F -hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 -soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI -WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi -tJ/X5g== ------END CERTIFICATE----- - # Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation # Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation # Label: "SSL.com Root Certification Authority RSA" @@ -3337,126 +2903,6 @@ rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 -----END CERTIFICATE----- -# Issuer: CN=GTS Root R1 O=Google Trust Services LLC -# Subject: CN=GTS Root R1 O=Google Trust Services LLC -# Label: "GTS Root R1" -# Serial: 146587175971765017618439757810265552097 -# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85 -# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8 -# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH -MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM -QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy -MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl -cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM -f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX -mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7 -zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P -fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc -vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4 -Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp 
-zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO -Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW -k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+ -DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF -lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW -Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1 -d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z -XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR -gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3 -d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv -J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg -DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM -+SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy -F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9 -SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws -E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R2 O=Google Trust Services LLC -# Subject: CN=GTS Root R2 O=Google Trust Services LLC -# Label: "GTS Root R2" -# Serial: 146587176055767053814479386953112547951 -# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b -# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d -# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH -MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM -QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy -MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl -cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv -CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg -GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu -XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd -re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu -PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1 -mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K -8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj -x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR -nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0 -kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok -twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp -8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT -vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT -z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA -pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb -pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB -R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R -RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk -0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC -5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF -izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn -yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R3 
O=Google Trust Services LLC -# Subject: CN=GTS Root R3 O=Google Trust Services LLC -# Label: "GTS Root R3" -# Serial: 146587176140553309517047991083707763997 -# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25 -# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5 -# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5 ------BEGIN CERTIFICATE----- -MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout -736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A -DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud -DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk -fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA -njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R4 O=Google Trust Services LLC -# Subject: CN=GTS Root R4 O=Google Trust Services LLC -# Label: "GTS Root R4" -# Serial: 146587176229350439916519468929765261721 -# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26 -# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb -# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd ------BEGIN CERTIFICATE----- -MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu -hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l -xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud -DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0 -CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx -sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w== ------END CERTIFICATE----- - # Issuer: CN=UCA Global G2 Root O=UniTrust # Subject: CN=UCA Global G2 Root O=UniTrust # Label: "UCA Global G2 Root" @@ -4255,3 +3701,935 @@ qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP 0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb -----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv 
+b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ +n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ 
+CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT +BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 1977337328857672817 +# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 +# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe +# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 +MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h 
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc +tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd +IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j +b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC +AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw +ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m +iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF +Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ +hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P +Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE +EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV +1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t +CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR +5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw +f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 +ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK +GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV +-----END CERTIFICATE----- + +# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. +# Label: "vTrus ECC Root CA" +# Serial: 630369271402956006249506845124680065938238527194 +# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 +# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 +# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 +-----BEGIN CERTIFICATE----- +MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw +RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY +BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz +MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u +LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 +v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd +e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw +V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA +AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG +GJTO +-----END CERTIFICATE----- + +# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. 
+# Label: "vTrus Root CA" +# Serial: 387574501246983434957692974888460947164905180485 +# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc +# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 +# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 +-----BEGIN CERTIFICATE----- +MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL +BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x +FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx +MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s +THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc +IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU +AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ +GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 +8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH +flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt +J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim +0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN +pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ +UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW +OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB +AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet +8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd +nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j +bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM +Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv +TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS +S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr +I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 +b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB +UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P +Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven +sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X2 O=Internet Security Research Group +# Subject: CN=ISRG Root X2 O=Internet Security Research Group +# Label: "ISRG Root X2" +# Serial: 87493402998870891108772069816698636114 +# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 +# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af +# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 +-----BEGIN CERTIFICATE----- +MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw +CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg +R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 +MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT +ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw +EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW ++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 +ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI +zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW +tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 +/q4AaOeMSQ+2b1tbFfLn +-----END 
CERTIFICATE----- + +# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Label: "HiPKI Root CA - G1" +# Serial: 60966262342023497858655262305426234976 +# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 +# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 +# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc +-----BEGIN CERTIFICATE----- +MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa +Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 +YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw +qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv +Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 +lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz +Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ +KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK +FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj +HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr +y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ +/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM +a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 +fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG +SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi +7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc +SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza +ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc +XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg +iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho +L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF +Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr +kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ +vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU +YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 159662223612894884239637590694 +# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc +# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 +# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 +-----BEGIN CERTIFICATE----- +MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD +VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw +MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g +UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT +BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx +uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV +HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ 
++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 +bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 159662320309726417404178440727 +# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 +# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a +# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo +27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w +Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw +TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl +qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH +szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 +Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk +MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 +wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p +aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN +VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb +C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe +QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy +h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 +7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J +ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef +MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ +Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT +6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ +0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm +2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb +bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 159662449406622349769042896298 +# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc +# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 +# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt +nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY +6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu 
+MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k +RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg +f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV ++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo +dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW +Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa +G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq +gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H +vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 +0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC +B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u +NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg +yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev +HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 +xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR +TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg +JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV +7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl +6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 159662495401136852707857743206 +# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 +# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 +# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G +jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 +4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 +VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm +ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 159662532700760215368942768210 +# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 +# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 +# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi +QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR 
+HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D +9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 +p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD +-----END CERTIFICATE----- + +# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj +# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj +# Label: "Telia Root CA v2" +# Serial: 7288924052977061235122729490515358 +# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 +# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd +# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx +CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE +AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 +NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ +MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq +AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 +vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 +lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD +n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT +7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o +6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC +TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 +WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R +DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI +pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj +YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy +rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ +8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi +0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM +A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS +SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K +TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF +6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er +3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt +Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT +VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW +ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA +rBPuUBQemMc= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 1 2020" +# Serial: 165870826978392376648679885835942448534 +# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed +# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 +# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 +NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB 
+BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS +zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 +QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ +VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW +wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV +dWNbFJWcHwHP2NVypw87 +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 1 2020" +# Serial: 126288379621884218666039612629459926992 +# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e +# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 +# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 +NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC +/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD +wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 +OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA +y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb +gfM0agPnIjhQW+0ZT0MW +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. 
+# Label: "DigiCert TLS ECC P384 Root G5" +# Serial: 13129116028163249804115411775095713523 +# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed +# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee +# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 +-----BEGIN CERTIFICATE----- +MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp +Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 +MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ +bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS +7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp +0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS +B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 +BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ +LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 +DXZDjC5Ty3zfDBeWUA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS RSA4096 Root G5" +# Serial: 11930366277458970227240571539258396554 +# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 +# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 +# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN +MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT +HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN +NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs +IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ +ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 +2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp +wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM +pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD +nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po +sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx +Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd +Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX +KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe +XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL +tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv +TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN +AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw +GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H +PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF +O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ +REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik +AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv +/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ +p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw +MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF +qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK 
+ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root R1 O=Certainly +# Subject: CN=Certainly Root R1 O=Certainly +# Label: "Certainly Root R1" +# Serial: 188833316161142517227353805653483829216 +# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 +# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af +# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw +PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy +dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 +YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 +1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT +vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed +aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 +1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 +r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 +cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ +wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ +6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA +2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH +Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR +eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB +/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u +d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr +PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d +8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi +1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd +rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di +taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 +lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj +yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn +Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy +yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n +wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 +OV+KmalBWQewLK8= +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root E1 O=Certainly +# Subject: CN=Certainly Root E1 O=Certainly +# Label: "Certainly Root E1" +# Serial: 8168531406727139161245376702891150584 +# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 +# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b +# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 +-----BEGIN CERTIFICATE----- +MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw +CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu +bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ +BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s +eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK ++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 +QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 +hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm 
+ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG +BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication RootCA3" +# Serial: 16247922307909811815 +# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26 +# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a +# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94 +-----BEGIN CERTIFICATE----- +MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV +BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw +JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2 +MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc +U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg +Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r +CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA +lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG +TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7 +9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7 +8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4 +g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we +GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst ++3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M +0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ +T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw +HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS +YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA +FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd +9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI +UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+ +OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke +gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf +iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV +nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD +2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI// +1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad +TdJ0MN1kURXbg4NR16/9M51NZg== +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. 
+# Label: "Security Communication ECC RootCA1" +# Serial: 15446673492073852651 +# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 +# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 +# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 +-----BEGIN CERTIFICATE----- +MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT +AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD +VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx +NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT +HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 +IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl +dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK +ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu +9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O +be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA1" +# Serial: 113562791157148395269083148143378328608 +# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 +# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a +# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU +MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI +T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz +MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF +SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh +bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z +xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ +spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 +58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR +at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll +5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq +nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK +V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ +pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO +z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn +jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ +WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF +7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 +YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli +awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u ++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 +X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN +SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo +P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI ++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz +znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 
+eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 +YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy +r/6zcCwupvI= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA2" +# Serial: 58605626836079930195615843123109055211 +# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c +# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 +# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 +-----BEGIN CERTIFICATE----- +MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw +CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ +VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy +MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ +TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS +b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B +IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ ++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK +sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA +94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B +43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root E46" +# Serial: 88989738453351742415770396670917916916 +# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 +# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a +# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 +-----BEGIN CERTIFICATE----- +MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw +CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T +ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN +MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG +A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT +ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC +WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ +6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B +Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa +qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q +4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root R46" +# Serial: 156256931880233212765902055439220583700 +# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 +# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 +# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 +-----BEGIN CERTIFICATE----- +MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf 
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD +Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw +HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY +MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp +YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa +ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz +SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf +iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X +ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 +IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS +VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE +SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu ++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt +8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L +HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt +zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P +AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c +mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ +YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 +gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA +Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB +JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX +DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui +TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 +dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 +LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp +0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY +QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS RSA Root CA 2022" +# Serial: 148535279242832292258835760425842727825 +# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da +# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca +# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed +-----BEGIN CERTIFICATE----- +MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO +MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD +DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX +DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw +b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC +AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP +L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY +t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins +S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 +PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO +L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 +R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w +dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS ++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS +d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG +AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f +gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j 
+BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z +NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt +hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM +QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf +R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ +DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW +P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy +lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq +bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w +AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q +r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji +Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU +98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS ECC Root CA 2022" +# Serial: 26605119622390491762507526719404364228 +# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 +# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 +# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 +-----BEGIN CERTIFICATE----- +MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT +U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 +MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh +dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm +acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN +SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME +GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW +uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp +15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN +b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA ECC TLS 2021" +# Serial: 81873346711060652204712539181482831616 +# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 +# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd +# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 +-----BEGIN CERTIFICATE----- +MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w +LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w +CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 +MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF +Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI +zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X +tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 +AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 +KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD +aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu +CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo +9H1/IISpQuQo +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Subject: 
CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA RSA TLS 2021" +# Serial: 111436099570196163832749341232207667876 +# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 +# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 +# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f +-----BEGIN CERTIFICATE----- +MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM +MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx +MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 +MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD +QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z +4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv +Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ +kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs +GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln +nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh +3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD +0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy +geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 +ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB +c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI +pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS +4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs +o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ +qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw +xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM +rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 +AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR +0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY +o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 +dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE +oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== +-----END CERTIFICATE----- diff --git a/packages/certifi/core.py b/packages/certifi/core.py index 5d2b8cd32..de028981b 100644 --- a/packages/certifi/core.py +++ b/packages/certifi/core.py @@ -1,20 +1,20 @@ -# -*- coding: utf-8 -*- - """ certifi.py ~~~~~~~~~~ This module returns the installation location of cacert.pem or its contents. """ -import os +import sys -try: - from importlib.resources import path as get_path, read_text + +if sys.version_info >= (3, 11): + + from importlib.resources import as_file, files _CACERT_CTX = None _CACERT_PATH = None - def where(): + def where() -> str: # This is slightly terrible, but we want to delay extracting the file # in cases where we're inside of a zipimport situation until someone # actually calls where(), but we don't want to re-extract the file @@ -33,28 +33,76 @@ def where(): # We also have to hold onto the actual context manager, because # it will do the cleanup whenever it gets garbage collected, so # we will also store that at the global level as well. 
+ _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + + def contents() -> str: + return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") + +elif sys.version_info >= (3, 7): + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the + # file in cases where we're inside of a zipimport situation until + # someone actually calls where(), but we don't want to re-extract + # the file on every call of where(), so we'll do it once then store + # it in a global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you + # to manage the cleanup of this file, so it doesn't actually + # return a path, it returns a context manager that will give + # you the path when you enter it and will do any cleanup when + # you leave it. In the common case of not needing a temporary + # file, it will just return the file system location and the + # __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. _CACERT_CTX = get_path("certifi", "cacert.pem") _CACERT_PATH = str(_CACERT_CTX.__enter__()) return _CACERT_PATH + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") + +else: + import os + import types + from typing import Union + + Package = Union[types.ModuleType, str] + Resource = Union[str, "os.PathLike"] -except ImportError: # This fallback will work for Python versions prior to 3.7 that lack the # importlib.resources module but relies on the existing `where` function # so won't address issues with environments like PyOxidizer that don't set # __file__ on modules. - def read_text(_module, _path, encoding="ascii"): - with open(where(), "r", encoding=encoding) as data: + def read_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict' + ) -> str: + with open(where(), encoding=encoding) as data: return data.read() # If we don't have importlib.resources, then we will just do the old logic # of assuming we're on the filesystem and munge the path directly. - def where(): + def where() -> str: f = os.path.dirname(__file__) return os.path.join(f, "cacert.pem") - -def contents(): - return read_text("certifi", "cacert.pem", encoding="ascii") + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/packages/certifi/py.typed b/packages/certifi/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/packages/h11/__init__.py b/packages/h11/__init__.py index ae39e0120..989e92c34 100644 --- a/packages/h11/__init__.py +++ b/packages/h11/__init__.py @@ -6,16 +6,57 @@ # semantics to check that what you're asking to write to the wire is sensible, # but at least it gets you out of dealing with the wire itself. 
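Annotation: the context comment above is the heart of h11's design. It is a sans-I/O protocol library, so it never touches a socket; callers feed raw bytes in and pull event objects out. A minimal sketch of that contract (illustrative only, not part of this patch; the request bytes and host are made up):

```python
import h11

# Server-side parse: bytes in, events out; no sockets involved.
server = h11.Connection(our_role=h11.SERVER)
server.receive_data(b"GET /ping HTTP/1.1\r\nHost: example.com\r\n\r\n")

event = server.next_event()
assert isinstance(event, h11.Request) and event.target == b"/ping"
# A GET with no body is followed immediately by EndOfMessage.
assert isinstance(server.next_event(), h11.EndOfMessage)
```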
-from ._connection import * -from ._events import * -from ._state import * -from ._util import LocalProtocolError, ProtocolError, RemoteProtocolError -from ._version import __version__ +from h11._connection import Connection, NEED_DATA, PAUSED +from h11._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from h11._state import ( + CLIENT, + CLOSED, + DONE, + ERROR, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError +from h11._version import __version__ PRODUCT_ID = "python-h11/" + __version__ -__all__ = ["ProtocolError", "LocalProtocolError", "RemoteProtocolError"] -__all__ += _events.__all__ -__all__ += _connection.__all__ -__all__ += _state.__all__ +__all__ = ( + "Connection", + "NEED_DATA", + "PAUSED", + "ConnectionClosed", + "Data", + "EndOfMessage", + "Event", + "InformationalResponse", + "Request", + "Response", + "CLIENT", + "CLOSED", + "DONE", + "ERROR", + "IDLE", + "MUST_CLOSE", + "SEND_BODY", + "SEND_RESPONSE", + "SERVER", + "SWITCHED_PROTOCOL", + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", +) diff --git a/packages/h11/_abnf.py b/packages/h11/_abnf.py index e6d49e1ea..933587fba 100644 --- a/packages/h11/_abnf.py +++ b/packages/h11/_abnf.py @@ -125,5 +125,8 @@ chunk_header = ( r"(?P<chunk_size>{chunk_size})" r"(?P<chunk_ext>{chunk_ext})?" - r"\r\n".format(**globals()) + r"{OWS}\r\n".format( + **globals() + ) # Even though the specification does not allow for extra whitespace, + # we are lenient with trailing whitespace because some servers in the wild send it. ) diff --git a/packages/h11/_connection.py b/packages/h11/_connection.py index 6f796ef51..d17527075 100644 --- a/packages/h11/_connection.py +++ b/packages/h11/_connection.py @@ -1,28 +1,53 @@ # This contains the main Connection class. Everything in h11 revolves around # this. - -from ._events import * # Import all event types +from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Type, Union + +from ._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) from ._headers import get_comma_header, has_expect_100_continue, set_comma_header -from ._readers import READERS +from ._readers import READERS, ReadersType from ._receivebuffer import ReceiveBuffer -from ._state import * # Import all state sentinels -from ._state import _SWITCH_CONNECT, _SWITCH_UPGRADE, ConnectionState +from ._state import ( + _SWITCH_CONNECT, + _SWITCH_UPGRADE, + CLIENT, + ConnectionState, + DONE, + ERROR, + MIGHT_SWITCH_PROTOCOL, + SEND_BODY, + SERVER, + SWITCHED_PROTOCOL, +) from ._util import ( # Import the internal things we need LocalProtocolError, - make_sentinel, RemoteProtocolError, + Sentinel, ) -from ._writers import WRITERS +from ._writers import WRITERS, WritersType # Everything in __all__ gets re-exported as part of the h11 public API. __all__ = ["Connection", "NEED_DATA", "PAUSED"] -NEED_DATA = make_sentinel("NEED_DATA") -PAUSED = make_sentinel("PAUSED") + +class NEED_DATA(Sentinel, metaclass=Sentinel): + pass + + +class PAUSED(Sentinel, metaclass=Sentinel): + pass + # If we ever have this much buffered without it making a complete parseable # event, we error out.
The only time we really buffer is when reading the -# request/reponse line + headers together, so this is effectively the limit on +# request/response line + headers together, so this is effectively the limit on # the size of that. # # Some precedents for defaults: @@ -44,7 +69,7 @@ # our rule is: # - If someone says Connection: close, we will close # - If someone uses HTTP/1.0, we will close. -def _keep_alive(event): +def _keep_alive(event: Union[Request, Response]) -> bool: connection = get_comma_header(event.headers, b"connection") if b"close" in connection: return False @@ -53,7 +78,9 @@ def _keep_alive(event): return True -def _body_framing(request_method, event): +def _body_framing( + request_method: bytes, event: Union[Request, Response] +) -> Tuple[str, Union[Tuple[()], Tuple[int]]]: # Called when we enter SEND_BODY to figure out framing information for # this body. # @@ -126,13 +153,16 @@ class Connection: """ def __init__( - self, our_role, max_incomplete_event_size=DEFAULT_MAX_INCOMPLETE_EVENT_SIZE - ): + self, + our_role: Type[Sentinel], + max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE, + ) -> None: self._max_incomplete_event_size = max_incomplete_event_size # State and role tracking if our_role not in (CLIENT, SERVER): raise ValueError("expected CLIENT or SERVER, not {!r}".format(our_role)) self.our_role = our_role + self.their_role: Type[Sentinel] if our_role is CLIENT: self.their_role = SERVER else: @@ -155,14 +185,14 @@ def __init__( # These two are only used to interpret framing headers for figuring # out how to read/write response bodies. their_http_version is also # made available as a convenient public API. - self.their_http_version = None - self._request_method = None + self.their_http_version: Optional[bytes] = None + self._request_method: Optional[bytes] = None # This is pure flow-control and doesn't at all affect the set of legal # transitions, so no need to bother ConnectionState with it: self.client_is_waiting_for_100_continue = False @property - def states(self): + def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]: """A dictionary like:: {CLIENT: <client state>, SERVER: <server state>} @@ -173,24 +203,24 @@ def states(self): return dict(self._cstate.states) @property - def our_state(self): + def our_state(self) -> Type[Sentinel]: """The current state of whichever role we are playing. See :ref:`state-machine` for details. """ return self._cstate.states[self.our_role] @property - def their_state(self): + def their_state(self) -> Type[Sentinel]: """The current state of whichever role we are NOT playing. See :ref:`state-machine` for details. """ return self._cstate.states[self.their_role] @property - def they_are_waiting_for_100_continue(self): + def they_are_waiting_for_100_continue(self) -> bool: return self.their_role is CLIENT and self.client_is_waiting_for_100_continue - def start_next_cycle(self): + def start_next_cycle(self) -> None: """Attempt to reset our connection state for a new request/response cycle.
@@ -210,12 +240,12 @@ def start_next_cycle(self): assert not self.client_is_waiting_for_100_continue self._respond_to_state_changes(old_states) - def _process_error(self, role): + def _process_error(self, role: Type[Sentinel]) -> None: old_states = dict(self._cstate.states) self._cstate.process_error(role) self._respond_to_state_changes(old_states) - def _server_switch_event(self, event): + def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]: if type(event) is InformationalResponse and event.status_code == 101: return _SWITCH_UPGRADE if type(event) is Response: @@ -227,7 +257,7 @@ def _server_switch_event(self, event): return None # All events go through here - def _process_event(self, role, event): + def _process_event(self, role: Type[Sentinel], event: Event) -> None: # First, pass the event through the state machine to make sure it # succeeds. old_states = dict(self._cstate.states) @@ -243,16 +273,15 @@ def _process_event(self, role, event): # Then perform the updates triggered by it. - # self._request_method if type(event) is Request: self._request_method = event.method - # self.their_http_version if role is self.their_role and type(event) in ( Request, Response, InformationalResponse, ): + event = cast(Union[Request, Response, InformationalResponse], event) self.their_http_version = event.http_version # Keep alive handling @@ -261,7 +290,9 @@ def _process_event(self, role, event): # shows up on a 1xx InformationalResponse. I think the idea is that # this is not supposed to happen. In any case, if it does happen, we # ignore it. - if type(event) in (Request, Response) and not _keep_alive(event): + if type(event) in (Request, Response) and not _keep_alive( + cast(Union[Request, Response], event) + ): self._cstate.process_keep_alive_disabled() # 100-continue @@ -274,22 +305,33 @@ def _process_event(self, role, event): self._respond_to_state_changes(old_states, event) - def _get_io_object(self, role, event, io_dict): + def _get_io_object( + self, + role: Type[Sentinel], + event: Optional[Event], + io_dict: Union[ReadersType, WritersType], + ) -> Optional[Callable[..., Any]]: # event may be None; it's only used when entering SEND_BODY state = self._cstate.states[role] if state is SEND_BODY: # Special case: the io_dict has a dict of reader/writer factories # that depend on the request/response framing. - framing_type, args = _body_framing(self._request_method, event) - return io_dict[SEND_BODY][framing_type](*args) + framing_type, args = _body_framing( + cast(bytes, self._request_method), cast(Union[Request, Response], event) + ) + return io_dict[SEND_BODY][framing_type](*args) # type: ignore[index] else: # General case: the io_dict just has the appropriate reader/writer # for this state - return io_dict.get((role, state)) + return io_dict.get((role, state)) # type: ignore[return-value] # This must be called after any action that might have caused # self._cstate.states to change. 
- def _respond_to_state_changes(self, old_states, event=None): + def _respond_to_state_changes( + self, + old_states: Dict[Type[Sentinel], Type[Sentinel]], + event: Optional[Event] = None, + ) -> None: # Update reader/writer if self.our_state != old_states[self.our_role]: self._writer = self._get_io_object(self.our_role, event, WRITERS) @@ -297,7 +339,7 @@ def _respond_to_state_changes(self, old_states, event=None): self._reader = self._get_io_object(self.their_role, event, READERS) @property - def trailing_data(self): + def trailing_data(self) -> Tuple[bytes, bool]: """Data that has been received, but not yet processed, represented as a tuple with two elements, where the first is a byte-string containing the unprocessed data itself, and the second is a bool that is True if @@ -307,7 +349,7 @@ def trailing_data(self): """ return (bytes(self._receive_buffer), self._receive_buffer_closed) - def receive_data(self, data): + def receive_data(self, data: bytes) -> None: """Add data to our internal receive buffer. This does not actually do any processing on the data, just stores @@ -353,7 +395,9 @@ def receive_data(self, data): else: self._receive_buffer_closed = True - def _extract_next_receive_event(self): + def _extract_next_receive_event( + self, + ) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: state = self.their_state # We don't pause immediately when they enter DONE, because even in # DONE state we can still process a ConnectionClosed() event. But @@ -372,14 +416,14 @@ def _extract_next_receive_event(self): # return that event, and then the state will change and we'll # get called again to generate the actual ConnectionClosed(). if hasattr(self._reader, "read_eof"): - event = self._reader.read_eof() + event = self._reader.read_eof() # type: ignore[attr-defined] else: event = ConnectionClosed() if event is None: event = NEED_DATA - return event + return event # type: ignore[no-any-return] - def next_event(self): + def next_event(self) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: """Parse the next event out of our receive buffer, update our internal state, and return it. @@ -424,7 +468,7 @@ def next_event(self): try: event = self._extract_next_receive_event() if event not in [NEED_DATA, PAUSED]: - self._process_event(self.their_role, event) + self._process_event(self.their_role, cast(Event, event)) if event is NEED_DATA: if len(self._receive_buffer) > self._max_incomplete_event_size: # 431 is "Request header fields too large" which is pretty @@ -444,7 +488,7 @@ def next_event(self): else: raise - def send(self, event): + def send(self, event: Event) -> Optional[bytes]: """Convert a high-level event into bytes that can be sent to the peer, while updating our internal state machine. 
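Annotation: the annotations above spell out the I/O contract of the typed API. `send()` returns `Optional[bytes]` (`None` when there is nothing to put on the wire, e.g. for a `ConnectionClosed` event), and `next_event()` returns either an `Event` or one of the `NEED_DATA`/`PAUSED` sentinels. A client-side sketch of the loop these types describe; the endpoint and socket plumbing are illustrative, not part of the patch:

```python
import socket
import h11

conn = h11.Connection(our_role=h11.CLIENT)
sock = socket.create_connection(("example.com", 80))  # illustrative endpoint

for event in [
    h11.Request(method="GET", target="/", headers=[("Host", "example.com")]),
    h11.EndOfMessage(),
]:
    data = conn.send(event)    # send() -> Optional[bytes]
    if data is not None:
        sock.sendall(data)

while True:
    event = conn.next_event()  # an Event, NEED_DATA, or PAUSED
    if event is h11.NEED_DATA:
        conn.receive_data(sock.recv(4096))
    elif isinstance(event, (h11.EndOfMessage, h11.ConnectionClosed)):
        break
```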
@@ -471,7 +515,7 @@ def send(self, event): else: return b"".join(data_list) - def send_with_data_passthrough(self, event): + def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]: """Identical to :meth:`send`, except that in situations where :meth:`send` returns a single :term:`bytes-like object`, this instead returns a list of them -- and when sending a :class:`Data` event, this @@ -483,7 +527,7 @@ def send_with_data_passthrough(self, event): raise LocalProtocolError("Can't send data when our state is ERROR") try: if type(event) is Response: - self._clean_up_response_headers_for_sending(event) + event = self._clean_up_response_headers_for_sending(event) # We want to call _process_event before calling the writer, # because if someone tries to do something invalid then this will # give a sensible error message, while our writers all just assume @@ -497,14 +541,14 @@ def send_with_data_passthrough(self, event): # In any situation where writer is None, process_event should # have raised ProtocolError assert writer is not None - data_list = [] + data_list: List[bytes] = [] writer(event, data_list.append) return data_list except: self._process_error(self.our_role) raise - def send_failed(self): + def send_failed(self) -> None: """Notify the state machine that we failed to send the data it gave us. @@ -528,9 +572,8 @@ def send_failed(self): # # This function's *only* responsibility is making sure headers are set up # right -- everything downstream just looks at the headers. There are no - # side channels. It mutates the response event in-place (but not the - # response.headers list object). - def _clean_up_response_headers_for_sending(self, response): + # side channels. + def _clean_up_response_headers_for_sending(self, response: Response) -> Response: assert type(response) is Response headers = response.headers @@ -543,7 +586,7 @@ def _clean_up_response_headers_for_sending(self, response): # we're allowed to leave out the framing headers -- see # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as # easy to get them right.) - method_for_choosing_headers = self._request_method + method_for_choosing_headers = cast(bytes, self._request_method) if method_for_choosing_headers == b"HEAD": method_for_choosing_headers = b"GET" framing_type, _ = _body_framing(method_for_choosing_headers, response) @@ -573,7 +616,7 @@ def _clean_up_response_headers_for_sending(self, response): if self._request_method != b"HEAD": need_close = True else: - headers = set_comma_header(headers, b"transfer-encoding", ["chunked"]) + headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"]) if not self._cstate.keep_alive or need_close: # Make sure Connection: close is set @@ -582,4 +625,9 @@ def _clean_up_response_headers_for_sending(self, response): connection.add(b"close") headers = set_comma_header(headers, b"connection", sorted(connection)) - response.headers = headers + return Response( + headers=headers, + status_code=response.status_code, + http_version=response.http_version, + reason=response.reason, + ) diff --git a/packages/h11/_events.py b/packages/h11/_events.py index 182793011..075bf8a46 100644 --- a/packages/h11/_events.py +++ b/packages/h11/_events.py @@ -6,13 +6,17 @@ # Don't subclass these. Stuff will break. import re +from abc import ABC +from dataclasses import dataclass, field +from typing import Any, cast, Dict, List, Tuple, Union -from . 
import _headers -from ._abnf import request_target +from ._abnf import method, request_target +from ._headers import Headers, normalize_and_validate from ._util import bytesify, LocalProtocolError, validate # Everything in __all__ gets re-exported as part of the h11 public API. __all__ = [ + "Event", "Request", "InformationalResponse", "Response", @@ -21,75 +25,20 @@ "ConnectionClosed", ] +method_re = re.compile(method.encode("ascii")) request_target_re = re.compile(request_target.encode("ascii")) -class _EventBundle: - _fields = [] - _defaults = {} - - def __init__(self, **kwargs): - _parsed = kwargs.pop("_parsed", False) - allowed = set(self._fields) - for kwarg in kwargs: - if kwarg not in allowed: - raise TypeError( - "unrecognized kwarg {} for {}".format( - kwarg, self.__class__.__name__ - ) - ) - required = allowed.difference(self._defaults) - for field in required: - if field not in kwargs: - raise TypeError( - "missing required kwarg {} for {}".format( - field, self.__class__.__name__ - ) - ) - self.__dict__.update(self._defaults) - self.__dict__.update(kwargs) - - # Special handling for some fields - - if "headers" in self.__dict__: - self.headers = _headers.normalize_and_validate( - self.headers, _parsed=_parsed - ) - - if not _parsed: - for field in ["method", "target", "http_version", "reason"]: - if field in self.__dict__: - self.__dict__[field] = bytesify(self.__dict__[field]) - - if "status_code" in self.__dict__: - if not isinstance(self.status_code, int): - raise LocalProtocolError("status code must be integer") - # Because IntEnum objects are instances of int, but aren't - # duck-compatible (sigh), see gh-72. - self.status_code = int(self.status_code) - - self._validate() - - def _validate(self): - pass - - def __repr__(self): - name = self.__class__.__name__ - kwarg_strs = [ - "{}={}".format(field, self.__dict__[field]) for field in self._fields - ] - kwarg_str = ", ".join(kwarg_strs) - return "{}({})".format(name, kwarg_str) - - # Useful for tests - def __eq__(self, other): - return self.__class__ == other.__class__ and self.__dict__ == other.__dict__ +class Event(ABC): + """ + Base class for h11 events. + """ - # This is an unhashable type. - __hash__ = None + __slots__ = () -class Request(_EventBundle): +@dataclass(init=False, frozen=True) +class Request(Event): """The beginning of an HTTP request. 
Fields: @@ -123,10 +72,38 @@ class Request(_EventBundle): """ - _fields = ["method", "target", "headers", "http_version"] - _defaults = {"http_version": b"1.1"} + __slots__ = ("method", "headers", "target", "http_version") + + method: bytes + headers: Headers + target: bytes + http_version: bytes + + def __init__( + self, + *, + method: Union[bytes, str], + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + target: Union[bytes, str], + http_version: Union[bytes, str] = b"1.1", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "method", bytesify(method)) + object.__setattr__(self, "target", bytesify(target)) + object.__setattr__(self, "http_version", bytesify(http_version)) + else: + object.__setattr__(self, "method", method) + object.__setattr__(self, "target", target) + object.__setattr__(self, "http_version", http_version) - def _validate(self): # "A server MUST respond with a 400 (Bad Request) status code to any # HTTP/1.1 request message that lacks a Host header field and to any # request message that contains more than one Host header field or a @@ -141,14 +118,61 @@ def _validate(self): if host_count > 1: raise LocalProtocolError("Found multiple Host: headers") + validate(method_re, self.method, "Illegal method characters") validate(request_target_re, self.target, "Illegal target characters") + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class _ResponseBase(Event): + __slots__ = ("headers", "http_version", "reason", "status_code") + + headers: Headers + http_version: bytes + reason: bytes + status_code: int + + def __init__( + self, + *, + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + status_code: int, + http_version: Union[bytes, str] = b"1.1", + reason: Union[bytes, str] = b"", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "reason", bytesify(reason)) + object.__setattr__(self, "http_version", bytesify(http_version)) + if not isinstance(status_code, int): + raise LocalProtocolError("status code must be integer") + # Because IntEnum objects are instances of int, but aren't + # duck-compatible (sigh), see gh-72. + object.__setattr__(self, "status_code", int(status_code)) + else: + object.__setattr__(self, "reason", reason) + object.__setattr__(self, "http_version", http_version) + object.__setattr__(self, "status_code", status_code) + + self.__post_init__() + + def __post_init__(self) -> None: + pass -class _ResponseBase(_EventBundle): - _fields = ["status_code", "headers", "http_version", "reason"] - _defaults = {"http_version": b"1.1", "reason": b""} + # This is an unhashable type. + __hash__ = None # type: ignore +@dataclass(init=False, frozen=True) class InformationalResponse(_ResponseBase): """An HTTP informational response. 
@@ -179,14 +203,18 @@ class InformationalResponse(_ResponseBase): """ - def _validate(self): + def __post_init__(self) -> None: if not (100 <= self.status_code < 200): raise LocalProtocolError( "InformationalResponse status_code should be in range " "[100, 200), not {}".format(self.status_code) ) + # This is an unhashable type. + __hash__ = None # type: ignore + +@dataclass(init=False, frozen=True) class Response(_ResponseBase): """The beginning of an HTTP response. @@ -196,7 +224,7 @@ class Response(_ResponseBase): The status code of this response, as an integer. For an :class:`Response`, this is always in the range [200, - 600). + 1000). .. attribute:: headers @@ -216,16 +244,20 @@ class Response(_ResponseBase): """ - def _validate(self): - if not (200 <= self.status_code < 600): + def __post_init__(self) -> None: + if not (200 <= self.status_code < 1000): raise LocalProtocolError( - "Response status_code should be in range [200, 600), not {}".format( + "Response status_code should be in range [200, 1000), not {}".format( self.status_code ) ) + # This is an unhashable type. + __hash__ = None # type: ignore + -class Data(_EventBundle): +@dataclass(init=False, frozen=True) +class Data(Event): """Part of an HTTP message body. Fields: @@ -258,8 +290,21 @@ class Data(_EventBundle): """ - _fields = ["data", "chunk_start", "chunk_end"] - _defaults = {"chunk_start": False, "chunk_end": False} + __slots__ = ("data", "chunk_start", "chunk_end") + + data: bytes + chunk_start: bool + chunk_end: bool + + def __init__( + self, data: bytes, chunk_start: bool = False, chunk_end: bool = False + ) -> None: + object.__setattr__(self, "data", data) + object.__setattr__(self, "chunk_start", chunk_start) + object.__setattr__(self, "chunk_end", chunk_end) + + # This is an unhashable type. + __hash__ = None # type: ignore # XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that @@ -267,7 +312,8 @@ class Data(_EventBundle): # present in the header section might bypass external security filters." # https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part # Unfortunately, the list of forbidden fields is long and vague :-/ -class EndOfMessage(_EventBundle): +@dataclass(init=False, frozen=True) +class EndOfMessage(Event): """The end of an HTTP message. Fields: @@ -284,11 +330,32 @@ class EndOfMessage(_EventBundle): """ - _fields = ["headers"] - _defaults = {"headers": []} + __slots__ = ("headers",) + + headers: Headers + + def __init__( + self, + *, + headers: Union[ + Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None + ] = None, + _parsed: bool = False, + ) -> None: + super().__init__() + if headers is None: + headers = Headers([]) + elif not isinstance(headers, Headers): + headers = normalize_and_validate(headers, _parsed=_parsed) + + object.__setattr__(self, "headers", headers) + + # This is an unhashable type. + __hash__ = None # type: ignore -class ConnectionClosed(_EventBundle): +@dataclass(frozen=True) +class ConnectionClosed(Event): """This event indicates that the sender has closed their outgoing connection. 
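[Note on the _events.py rewrite above: the dynamic `_EventBundle` machinery is replaced by frozen, slotted dataclasses with explicit typed constructors. A minimal sketch of how the resulting public API behaves — the request values here are illustrative, not taken from the diff:

    import dataclasses

    import h11

    # Construction still validates and normalizes: str fields are bytesified,
    # and headers become lowercased (name, value) byte pairs.
    req = h11.Request(
        method="GET",
        target="/",
        headers=[("Host", "example.com")],
    )
    assert req.method == b"GET"
    assert req.headers == [(b"host", b"example.com")]

    # Unlike the old events, these objects are immutable: assignment raises,
    # so code that used to tweak an event in place must build a replacement.
    try:
        req.target = b"/other"
    except dataclasses.FrozenInstanceError:
        req = h11.Request(
            method=req.method,
            target=b"/other",
            headers=req.headers,
            http_version=req.http_version,
        )

This immutability is also why the test helpers further down stop assigning to `event.data` and `event.chunk_start` and instead construct fresh `Data` events.]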
diff --git a/packages/h11/_headers.py b/packages/h11/_headers.py
index 7ed39bc12..b97d020b6 100644
--- a/packages/h11/_headers.py
+++ b/packages/h11/_headers.py
@@ -1,8 +1,18 @@
 import re
+from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union
 
 from ._abnf import field_name, field_value
 from ._util import bytesify, LocalProtocolError, validate
 
+if TYPE_CHECKING:
+    from ._events import Request
+
+try:
+    from typing import Literal
+except ImportError:
+    from typing_extensions import Literal  # type: ignore
+
+
 # Facts
 # -----
 #
@@ -57,12 +67,12 @@
 #
 # Maybe a dict-of-lists would be better?
 
-_content_length_re = re.compile(br"[0-9]+")
+_content_length_re = re.compile(rb"[0-9]+")
 _field_name_re = re.compile(field_name.encode("ascii"))
 _field_value_re = re.compile(field_value.encode("ascii"))
 
 
-class Headers:
+class Headers(Sequence[Tuple[bytes, bytes]]):
     """
     A list-like interface that allows iterating over headers as byte-pairs
     of (lowercased-name, value).
@@ -89,34 +99,57 @@ class Headers:
 
     __slots__ = "_full_items"
 
-    def __init__(self, full_items):
+    def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None:
         self._full_items = full_items
 
-    def __iter__(self):
-        for _, name, value in self._full_items:
-            yield name, value
-
-    def __bool__(self):
+    def __bool__(self) -> bool:
         return bool(self._full_items)
 
-    def __eq__(self, other):
-        return list(self) == list(other)
+    def __eq__(self, other: object) -> bool:
+        return list(self) == list(other)  # type: ignore
 
-    def __len__(self):
+    def __len__(self) -> int:
         return len(self._full_items)
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return "<Headers(%s)>" % repr(list(self))
 
-    def __getitem__(self, idx):
+    def __getitem__(self, idx: int) -> Tuple[bytes, bytes]:  # type: ignore[override]
         _, name, value = self._full_items[idx]
         return (name, value)
 
-    def raw_items(self):
+    def raw_items(self) -> List[Tuple[bytes, bytes]]:
         return [(raw_name, value) for raw_name, _, value in self._full_items]
 
 
-def normalize_and_validate(headers, _parsed=False):
+HeaderTypes = Union[
+    List[Tuple[bytes, bytes]],
+    List[Tuple[bytes, str]],
+    List[Tuple[str, bytes]],
+    List[Tuple[str, str]],
+]
+
+
+@overload
+def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers:
+    ...
+
+
+@overload
+def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers:
+    ...
+
+
+@overload
+def normalize_and_validate(
    headers: Union[Headers, HeaderTypes], _parsed: bool = False
+) -> Headers:
+    ...
+
+
+def normalize_and_validate(
+    headers: Union[Headers, HeaderTypes], _parsed: bool = False
+) -> Headers:
     new_headers = []
     seen_content_length = None
     saw_transfer_encoding = False
@@ -129,6 +162,9 @@ def normalize_and_validate(headers, _parsed=False):
             value = bytesify(value)
             validate(_field_name_re, name, "Illegal header name {!r}", name)
             validate(_field_value_re, value, "Illegal header value {!r}", value)
+            assert isinstance(name, bytes)
+            assert isinstance(value, bytes)
+
         raw_name = name
         name = name.lower()
         if name == b"content-length":
@@ -166,7 +202,7 @@ def normalize_and_validate(headers, _parsed=False):
     return Headers(new_headers)
 
 
-def get_comma_header(headers, name):
+def get_comma_header(headers: Headers, name: bytes) -> List[bytes]:
     # Should only be used for headers whose value is a list of
     # comma-separated, case-insensitive values.
     #
@@ -202,7 +238,7 @@ def get_comma_header(headers, name):
     # Expect: the only legal value is the literal string
     # "100-continue". Splitting on commas is harmless.
Case insensitive. # - out = [] + out: List[bytes] = [] for _, found_name, found_raw_value in headers._full_items: if found_name == name: found_raw_value = found_raw_value.lower() @@ -213,7 +249,7 @@ def get_comma_header(headers, name): return out -def set_comma_header(headers, name, new_values): +def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers: # The header name `name` is expected to be lower-case bytes. # # Note that when we store the header we use title casing for the header @@ -223,7 +259,7 @@ def set_comma_header(headers, name, new_values): # here given the cases where we're using `set_comma_header`... # # Connection, Content-Length, Transfer-Encoding. - new_headers = [] + new_headers: List[Tuple[bytes, bytes]] = [] for found_raw_name, found_name, found_raw_value in headers._full_items: if found_name != name: new_headers.append((found_raw_name, found_raw_value)) @@ -232,7 +268,7 @@ def set_comma_header(headers, name, new_values): return normalize_and_validate(new_headers) -def has_expect_100_continue(request): +def has_expect_100_continue(request: "Request") -> bool: # https://tools.ietf.org/html/rfc7231#section-5.1.1 # "A server that receives a 100-continue expectation in an HTTP/1.0 request # MUST ignore that expectation." diff --git a/packages/h11/_readers.py b/packages/h11/_readers.py index 0ead0bec3..08a9574da 100644 --- a/packages/h11/_readers.py +++ b/packages/h11/_readers.py @@ -17,30 +17,39 @@ # - or, for body readers, a dict of per-framing reader factories import re +from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union from ._abnf import chunk_header, header_field, request_line, status_line -from ._events import * -from ._state import * -from ._util import LocalProtocolError, RemoteProtocolError, validate +from ._events import Data, EndOfMessage, InformationalResponse, Request, Response +from ._receivebuffer import ReceiveBuffer +from ._state import ( + CLIENT, + CLOSED, + DONE, + IDLE, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, +) +from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate __all__ = ["READERS"] header_field_re = re.compile(header_field.encode("ascii")) +obs_fold_re = re.compile(rb"[ \t]+") -# Remember that this has to run in O(n) time -- so e.g. the bytearray cast is -# critical. 
-obs_fold_re = re.compile(br"[ \t]+") - -def _obsolete_line_fold(lines): +def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]: it = iter(lines) - last = None + last: Optional[bytes] = None for line in it: match = obs_fold_re.match(line) if match: if last is None: raise LocalProtocolError("continuation line at start of headers") if not isinstance(last, bytearray): + # Cast to a mutable type, avoiding copy on append to ensure O(n) time last = bytearray(last) last += b" " last += line[match.end() :] @@ -52,7 +61,9 @@ def _obsolete_line_fold(lines): yield last -def _decode_header_lines(lines): +def _decode_header_lines( + lines: Iterable[bytes], +) -> Iterable[Tuple[bytes, bytes]]: for line in _obsolete_line_fold(lines): matches = validate(header_field_re, line, "illegal header line: {!r}", line) yield (matches["field_name"], matches["field_value"]) @@ -61,7 +72,7 @@ def _decode_header_lines(lines): request_line_re = re.compile(request_line.encode("ascii")) -def maybe_read_from_IDLE_client(buf): +def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]: lines = buf.maybe_extract_lines() if lines is None: if buf.is_next_line_obviously_invalid_request_line(): @@ -80,7 +91,9 @@ def maybe_read_from_IDLE_client(buf): status_line_re = re.compile(status_line.encode("ascii")) -def maybe_read_from_SEND_RESPONSE_server(buf): +def maybe_read_from_SEND_RESPONSE_server( + buf: ReceiveBuffer, +) -> Union[InformationalResponse, Response, None]: lines = buf.maybe_extract_lines() if lines is None: if buf.is_next_line_obviously_invalid_request_line(): @@ -89,22 +102,29 @@ def maybe_read_from_SEND_RESPONSE_server(buf): if not lines: raise LocalProtocolError("no response line received") matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0]) - # Tolerate missing reason phrases - if matches["reason"] is None: - matches["reason"] = b"" - status_code = matches["status_code"] = int(matches["status_code"]) - class_ = InformationalResponse if status_code < 200 else Response + http_version = ( + b"1.1" if matches["http_version"] is None else matches["http_version"] + ) + reason = b"" if matches["reason"] is None else matches["reason"] + status_code = int(matches["status_code"]) + class_: Union[Type[InformationalResponse], Type[Response]] = ( + InformationalResponse if status_code < 200 else Response + ) return class_( - headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches + headers=list(_decode_header_lines(lines[1:])), + _parsed=True, + status_code=status_code, + reason=reason, + http_version=http_version, ) class ContentLengthReader: - def __init__(self, length): + def __init__(self, length: int) -> None: self._length = length self._remaining = length - def __call__(self, buf): + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: if self._remaining == 0: return EndOfMessage() data = buf.maybe_extract_at_most(self._remaining) @@ -113,7 +133,7 @@ def __call__(self, buf): self._remaining -= len(data) return Data(data=data) - def read_eof(self): + def read_eof(self) -> NoReturn: raise RemoteProtocolError( "peer closed connection without sending complete message body " "(received {} bytes, expected {})".format( @@ -126,7 +146,7 @@ def read_eof(self): class ChunkedReader: - def __init__(self): + def __init__(self) -> None: self._bytes_in_chunk = 0 # After reading a chunk, we have to throw away the trailing \r\n; if # this is >0 then we discard that many bytes before resuming regular @@ -134,7 +154,7 @@ def __init__(self): 
self._bytes_to_discard = 0 self._reading_trailer = False - def __call__(self, buf): + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: if self._reading_trailer: lines = buf.maybe_extract_lines() if lines is None: @@ -180,7 +200,7 @@ def __call__(self, buf): chunk_end = False return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end) - def read_eof(self): + def read_eof(self) -> NoReturn: raise RemoteProtocolError( "peer closed connection without sending complete message body " "(incomplete chunked read)" @@ -188,23 +208,28 @@ def read_eof(self): class Http10Reader: - def __call__(self, buf): + def __call__(self, buf: ReceiveBuffer) -> Optional[Data]: data = buf.maybe_extract_at_most(999999999) if data is None: return None return Data(data=data) - def read_eof(self): + def read_eof(self) -> EndOfMessage: return EndOfMessage() -def expect_nothing(buf): +def expect_nothing(buf: ReceiveBuffer) -> None: if buf: raise LocalProtocolError("Got data when expecting EOF") return None -READERS = { +ReadersType = Dict[ + Union[Type[Sentinel], Tuple[Type[Sentinel], Type[Sentinel]]], + Union[Callable[..., Any], Dict[str, Callable[..., Any]]], +] + +READERS: ReadersType = { (CLIENT, IDLE): maybe_read_from_IDLE_client, (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server, (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server, diff --git a/packages/h11/_receivebuffer.py b/packages/h11/_receivebuffer.py index a3737f351..e5c4e08a5 100644 --- a/packages/h11/_receivebuffer.py +++ b/packages/h11/_receivebuffer.py @@ -1,5 +1,6 @@ import re import sys +from typing import List, Optional, Union __all__ = ["ReceiveBuffer"] @@ -44,26 +45,26 @@ class ReceiveBuffer: - def __init__(self): + def __init__(self) -> None: self._data = bytearray() self._next_line_search = 0 self._multiple_lines_search = 0 - def __iadd__(self, byteslike): + def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer": self._data += byteslike return self - def __bool__(self): + def __bool__(self) -> bool: return bool(len(self)) - def __len__(self): + def __len__(self) -> int: return len(self._data) # for @property unprocessed_data - def __bytes__(self): + def __bytes__(self) -> bytes: return bytes(self._data) - def _extract(self, count): + def _extract(self, count: int) -> bytearray: # extracting an initial slice of the data buffer and return it out = self._data[:count] del self._data[:count] @@ -73,7 +74,7 @@ def _extract(self, count): return out - def maybe_extract_at_most(self, count): + def maybe_extract_at_most(self, count: int) -> Optional[bytearray]: """ Extract a fixed number of bytes from the buffer. """ @@ -83,7 +84,7 @@ def maybe_extract_at_most(self, count): return self._extract(count) - def maybe_extract_next_line(self): + def maybe_extract_next_line(self) -> Optional[bytearray]: """ Extract the first line, if it is completed in the buffer. """ @@ -100,7 +101,7 @@ def maybe_extract_next_line(self): return self._extract(idx) - def maybe_extract_lines(self): + def maybe_extract_lines(self) -> Optional[List[bytearray]]: """ Extract everything up to the first blank line, and return a list of lines. """ @@ -143,7 +144,7 @@ def maybe_extract_lines(self): # This is especially interesting when peer is messing up with HTTPS and # sent us a TLS stream where we were expecting plain HTTP given all # versions of TLS so far start handshake with a 0x16 message type code. 
- def is_next_line_obviously_invalid_request_line(self): + def is_next_line_obviously_invalid_request_line(self) -> bool: try: # HTTP header line must not contain non-printable characters # and should not start with a space diff --git a/packages/h11/_state.py b/packages/h11/_state.py index 0f08a090c..3593430a7 100644 --- a/packages/h11/_state.py +++ b/packages/h11/_state.py @@ -110,9 +110,10 @@ # tables. But it can't automatically read the transitions that are written # directly in Python code. So if you touch those, you need to also update the # script to keep it in sync! +from typing import cast, Dict, Optional, Set, Tuple, Type, Union from ._events import * -from ._util import LocalProtocolError, make_sentinel +from ._util import LocalProtocolError, Sentinel # Everything in __all__ gets re-exported as part of the h11 public API. __all__ = [ @@ -129,26 +130,70 @@ "ERROR", ] -CLIENT = make_sentinel("CLIENT") -SERVER = make_sentinel("SERVER") + +class CLIENT(Sentinel, metaclass=Sentinel): + pass + + +class SERVER(Sentinel, metaclass=Sentinel): + pass + # States -IDLE = make_sentinel("IDLE") -SEND_RESPONSE = make_sentinel("SEND_RESPONSE") -SEND_BODY = make_sentinel("SEND_BODY") -DONE = make_sentinel("DONE") -MUST_CLOSE = make_sentinel("MUST_CLOSE") -CLOSED = make_sentinel("CLOSED") -ERROR = make_sentinel("ERROR") +class IDLE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_RESPONSE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_BODY(Sentinel, metaclass=Sentinel): + pass + + +class DONE(Sentinel, metaclass=Sentinel): + pass + + +class MUST_CLOSE(Sentinel, metaclass=Sentinel): + pass + + +class CLOSED(Sentinel, metaclass=Sentinel): + pass + + +class ERROR(Sentinel, metaclass=Sentinel): + pass + # Switch types -MIGHT_SWITCH_PROTOCOL = make_sentinel("MIGHT_SWITCH_PROTOCOL") -SWITCHED_PROTOCOL = make_sentinel("SWITCHED_PROTOCOL") +class MIGHT_SWITCH_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class SWITCHED_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class _SWITCH_UPGRADE(Sentinel, metaclass=Sentinel): + pass + -_SWITCH_UPGRADE = make_sentinel("_SWITCH_UPGRADE") -_SWITCH_CONNECT = make_sentinel("_SWITCH_CONNECT") +class _SWITCH_CONNECT(Sentinel, metaclass=Sentinel): + pass -EVENT_TRIGGERED_TRANSITIONS = { + +EventTransitionType = Dict[ + Type[Sentinel], + Dict[ + Type[Sentinel], + Dict[Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], Type[Sentinel]], + ], +] + +EVENT_TRIGGERED_TRANSITIONS: EventTransitionType = { CLIENT: { IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED}, SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, @@ -181,9 +226,13 @@ }, } +StateTransitionType = Dict[ + Tuple[Type[Sentinel], Type[Sentinel]], Dict[Type[Sentinel], Type[Sentinel]] +] + # NB: there are also some special-case state-triggered transitions hard-coded # into _fire_state_triggered_transitions below. -STATE_TRIGGERED_TRANSITIONS = { +STATE_TRIGGERED_TRANSITIONS: StateTransitionType = { # (Client state, Server state) -> new states # Protocol negotiation (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL}, @@ -198,7 +247,7 @@ class ConnectionState: - def __init__(self): + def __init__(self) -> None: # Extra bits of state that don't quite fit into the state model. # If this is False then it enables the automatic DONE -> MUST_CLOSE @@ -207,23 +256,29 @@ def __init__(self): # This is a subset of {UPGRADE, CONNECT}, containing the proposals # made by the client for switching protocols. 
- self.pending_switch_proposals = set() + self.pending_switch_proposals: Set[Type[Sentinel]] = set() - self.states = {CLIENT: IDLE, SERVER: IDLE} + self.states: Dict[Type[Sentinel], Type[Sentinel]] = {CLIENT: IDLE, SERVER: IDLE} - def process_error(self, role): + def process_error(self, role: Type[Sentinel]) -> None: self.states[role] = ERROR self._fire_state_triggered_transitions() - def process_keep_alive_disabled(self): + def process_keep_alive_disabled(self) -> None: self.keep_alive = False self._fire_state_triggered_transitions() - def process_client_switch_proposal(self, switch_event): + def process_client_switch_proposal(self, switch_event: Type[Sentinel]) -> None: self.pending_switch_proposals.add(switch_event) self._fire_state_triggered_transitions() - def process_event(self, role, event_type, server_switch_event=None): + def process_event( + self, + role: Type[Sentinel], + event_type: Type[Event], + server_switch_event: Optional[Type[Sentinel]] = None, + ) -> None: + _event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]] = event_type if server_switch_event is not None: assert role is SERVER if server_switch_event not in self.pending_switch_proposals: @@ -232,30 +287,35 @@ def process_event(self, role, event_type, server_switch_event=None): server_switch_event ) ) - event_type = (event_type, server_switch_event) - if server_switch_event is None and event_type is Response: + _event_type = (event_type, server_switch_event) + if server_switch_event is None and _event_type is Response: self.pending_switch_proposals = set() - self._fire_event_triggered_transitions(role, event_type) + self._fire_event_triggered_transitions(role, _event_type) # Special case: the server state does get to see Request # events. - if event_type is Request: + if _event_type is Request: assert role is CLIENT self._fire_event_triggered_transitions(SERVER, (Request, CLIENT)) self._fire_state_triggered_transitions() - def _fire_event_triggered_transitions(self, role, event_type): + def _fire_event_triggered_transitions( + self, + role: Type[Sentinel], + event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], + ) -> None: state = self.states[role] try: new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type] except KeyError: + event_type = cast(Type[Event], event_type) raise LocalProtocolError( "can't handle event type {} when role={} and state={}".format( event_type.__name__, role, self.states[role] ) - ) + ) from None self.states[role] = new_state - def _fire_state_triggered_transitions(self): + def _fire_state_triggered_transitions(self) -> None: # We apply these rules repeatedly until converging on a fixed point while True: start_states = dict(self.states) @@ -295,7 +355,7 @@ def _fire_state_triggered_transitions(self): # Fixed point reached return - def start_next_cycle(self): + def start_next_cycle(self) -> None: if self.states != {CLIENT: DONE, SERVER: DONE}: raise LocalProtocolError( "not in a reusable state. 
self.states={}".format(self.states) diff --git a/packages/h11/_util.py b/packages/h11/_util.py index eb1a5cd9e..671844529 100644 --- a/packages/h11/_util.py +++ b/packages/h11/_util.py @@ -1,9 +1,10 @@ +from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union + __all__ = [ "ProtocolError", "LocalProtocolError", "RemoteProtocolError", "validate", - "make_sentinel", "bytesify", ] @@ -37,7 +38,7 @@ class ProtocolError(Exception): """ - def __init__(self, msg, error_status_hint=400): + def __init__(self, msg: str, error_status_hint: int = 400) -> None: if type(self) is ProtocolError: raise TypeError("tried to directly instantiate ProtocolError") Exception.__init__(self, msg) @@ -56,14 +57,14 @@ def __init__(self, msg, error_status_hint=400): # LocalProtocolError is for local errors and RemoteProtocolError is for # remote errors. class LocalProtocolError(ProtocolError): - def _reraise_as_remote_protocol_error(self): + def _reraise_as_remote_protocol_error(self) -> NoReturn: # After catching a LocalProtocolError, use this method to re-raise it # as a RemoteProtocolError. This method must be called from inside an # except: block. # # An easy way to get an equivalent RemoteProtocolError is just to # modify 'self' in place. - self.__class__ = RemoteProtocolError + self.__class__ = RemoteProtocolError # type: ignore # But the re-raising is somewhat non-trivial -- you might think that # now that we've modified the in-flight exception object, that just # doing 'raise' to re-raise it would be enough. But it turns out that @@ -80,7 +81,9 @@ class RemoteProtocolError(ProtocolError): pass -def validate(regex, data, msg="malformed data", *format_args): +def validate( + regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any +) -> Dict[str, bytes]: match = regex.fullmatch(data) if not match: if format_args: @@ -97,21 +100,31 @@ def validate(regex, data, msg="malformed data", *format_args): # # The bonus property is useful if you want to take the return value from # next_event() and do some sort of dispatch based on type(event). -class _SentinelBase(type): - def __repr__(self): - return self.__name__ + +_T_Sentinel = TypeVar("_T_Sentinel", bound="Sentinel") -def make_sentinel(name): - cls = _SentinelBase(name, (_SentinelBase,), {}) - cls.__class__ = cls - return cls +class Sentinel(type): + def __new__( + cls: Type[_T_Sentinel], + name: str, + bases: Tuple[type, ...], + namespace: Dict[str, Any], + **kwds: Any + ) -> _T_Sentinel: + assert bases == (Sentinel,) + v = super().__new__(cls, name, bases, namespace, **kwds) + v.__class__ = v # type: ignore + return v + + def __repr__(self) -> str: + return self.__name__ # Used for methods, request targets, HTTP versions, header names, and header # values. Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always # returns bytes. -def bytesify(s): +def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes: # Fast-path: if type(s) is bytes: return s diff --git a/packages/h11/_version.py b/packages/h11/_version.py index cb5c2c322..4c8911305 100644 --- a/packages/h11/_version.py +++ b/packages/h11/_version.py @@ -13,4 +13,4 @@ # want. (Contrast with the special suffix 1.0.0.dev, which sorts *before* # 1.0.0.) 
-__version__ = "0.12.0" +__version__ = "0.14.0" diff --git a/packages/h11/_writers.py b/packages/h11/_writers.py index cb5e8a8c5..939cdb912 100644 --- a/packages/h11/_writers.py +++ b/packages/h11/_writers.py @@ -7,14 +7,19 @@ # - a writer # - or, for body writers, a dict of framin-dependent writer factories -from ._events import Data, EndOfMessage +from typing import Any, Callable, Dict, List, Tuple, Type, Union + +from ._events import Data, EndOfMessage, Event, InformationalResponse, Request, Response +from ._headers import Headers from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER -from ._util import LocalProtocolError +from ._util import LocalProtocolError, Sentinel __all__ = ["WRITERS"] +Writer = Callable[[bytes], Any] + -def write_headers(headers, write): +def write_headers(headers: Headers, write: Writer) -> None: # "Since the Host field-value is critical information for handling a # request, a user agent SHOULD generate Host as the first header field # following the request-line." - RFC 7230 @@ -28,7 +33,7 @@ def write_headers(headers, write): write(b"\r\n") -def write_request(request, write): +def write_request(request: Request, write: Writer) -> None: if request.http_version != b"1.1": raise LocalProtocolError("I only send HTTP/1.1") write(b"%s %s HTTP/1.1\r\n" % (request.method, request.target)) @@ -36,7 +41,9 @@ def write_request(request, write): # Shared between InformationalResponse and Response -def write_any_response(response, write): +def write_any_response( + response: Union[InformationalResponse, Response], write: Writer +) -> None: if response.http_version != b"1.1": raise LocalProtocolError("I only send HTTP/1.1") status_bytes = str(response.status_code).encode("ascii") @@ -53,7 +60,7 @@ def write_any_response(response, write): class BodyWriter: - def __call__(self, event, write): + def __call__(self, event: Event, write: Writer) -> None: if type(event) is Data: self.send_data(event.data, write) elif type(event) is EndOfMessage: @@ -61,6 +68,12 @@ def __call__(self, event, write): else: # pragma: no cover assert False + def send_data(self, data: bytes, write: Writer) -> None: + pass + + def send_eom(self, headers: Headers, write: Writer) -> None: + pass + # # These are all careful not to do anything to 'data' except call len(data) and @@ -69,16 +82,16 @@ def __call__(self, event, write): # sendfile(2). # class ContentLengthWriter(BodyWriter): - def __init__(self, length): + def __init__(self, length: int) -> None: self._length = length - def send_data(self, data, write): + def send_data(self, data: bytes, write: Writer) -> None: self._length -= len(data) if self._length < 0: raise LocalProtocolError("Too much data for declared Content-Length") write(data) - def send_eom(self, headers, write): + def send_eom(self, headers: Headers, write: Writer) -> None: if self._length != 0: raise LocalProtocolError("Too little data for declared Content-Length") if headers: @@ -86,7 +99,7 @@ def send_eom(self, headers, write): class ChunkedWriter(BodyWriter): - def send_data(self, data, write): + def send_data(self, data: bytes, write: Writer) -> None: # if we encoded 0-length data in the naive way, it would look like an # end-of-message. 
if not data: @@ -95,23 +108,32 @@ def send_data(self, data, write): write(data) write(b"\r\n") - def send_eom(self, headers, write): + def send_eom(self, headers: Headers, write: Writer) -> None: write(b"0\r\n") write_headers(headers, write) class Http10Writer(BodyWriter): - def send_data(self, data, write): + def send_data(self, data: bytes, write: Writer) -> None: write(data) - def send_eom(self, headers, write): + def send_eom(self, headers: Headers, write: Writer) -> None: if headers: raise LocalProtocolError("can't send trailers to HTTP/1.0 client") # no need to close the socket ourselves, that will be taken care of by # Connection: close machinery -WRITERS = { +WritersType = Dict[ + Union[Tuple[Type[Sentinel], Type[Sentinel]], Type[Sentinel]], + Union[ + Dict[str, Type[BodyWriter]], + Callable[[Union[InformationalResponse, Response], Writer], None], + Callable[[Request, Writer], None], + ], +] + +WRITERS: WritersType = { (CLIENT, IDLE): write_request, (SERVER, IDLE): write_any_response, (SERVER, SEND_RESPONSE): write_any_response, diff --git a/packages/h11/py.typed b/packages/h11/py.typed new file mode 100644 index 000000000..f5642f79f --- /dev/null +++ b/packages/h11/py.typed @@ -0,0 +1 @@ +Marker diff --git a/packages/h11/tests/helpers.py b/packages/h11/tests/helpers.py index 9d2cf3801..571be4446 100644 --- a/packages/h11/tests/helpers.py +++ b/packages/h11/tests/helpers.py @@ -1,36 +1,55 @@ -from .._connection import * -from .._events import * -from .._state import * +from typing import cast, List, Type, Union, ValuesView +from .._connection import Connection, NEED_DATA, PAUSED +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import CLIENT, CLOSED, DONE, MUST_CLOSE, SERVER +from .._util import Sentinel -def get_all_events(conn): +try: + from typing import Literal +except ImportError: + from typing_extensions import Literal # type: ignore + + +def get_all_events(conn: Connection) -> List[Event]: got_events = [] while True: event = conn.next_event() if event in (NEED_DATA, PAUSED): break + event = cast(Event, event) got_events.append(event) if type(event) is ConnectionClosed: break return got_events -def receive_and_get(conn, data): +def receive_and_get(conn: Connection, data: bytes) -> List[Event]: conn.receive_data(data) return get_all_events(conn) # Merges adjacent Data events, converts payloads to bytestrings, and removes # chunk boundaries. -def normalize_data_events(in_events): - out_events = [] +def normalize_data_events(in_events: List[Event]) -> List[Event]: + out_events: List[Event] = [] for event in in_events: if type(event) is Data: - event.data = bytes(event.data) - event.chunk_start = False - event.chunk_end = False + event = Data(data=bytes(event.data), chunk_start=False, chunk_end=False) if out_events and type(out_events[-1]) is type(event) is Data: - out_events[-1].data += event.data + out_events[-1] = Data( + data=out_events[-1].data + event.data, + chunk_start=out_events[-1].chunk_start, + chunk_end=out_events[-1].chunk_end, + ) else: out_events.append(event) return out_events @@ -41,16 +60,21 @@ def normalize_data_events(in_events): # of pushing them through two Connections with a fake network link in # between. 
class ConnectionPair: - def __init__(self): + def __init__(self) -> None: self.conn = {CLIENT: Connection(CLIENT), SERVER: Connection(SERVER)} self.other = {CLIENT: SERVER, SERVER: CLIENT} @property - def conns(self): + def conns(self) -> ValuesView[Connection]: return self.conn.values() # expect="match" if expect=send_events; expect=[...] to say what expected - def send(self, role, send_events, expect="match"): + def send( + self, + role: Type[Sentinel], + send_events: Union[List[Event], Event], + expect: Union[List[Event], Event, Literal["match"]] = "match", + ) -> bytes: if not isinstance(send_events, list): send_events = [send_events] data = b"" diff --git a/packages/h11/tests/test_against_stdlib_http.py b/packages/h11/tests/test_against_stdlib_http.py index e6c5db444..d2ee13149 100644 --- a/packages/h11/tests/test_against_stdlib_http.py +++ b/packages/h11/tests/test_against_stdlib_http.py @@ -5,13 +5,16 @@ import threading from contextlib import closing, contextmanager from http.server import SimpleHTTPRequestHandler +from typing import Callable, Generator from urllib.request import urlopen import h11 @contextmanager -def socket_server(handler): +def socket_server( + handler: Callable[..., socketserver.BaseRequestHandler] +) -> Generator[socketserver.TCPServer, None, None]: httpd = socketserver.TCPServer(("127.0.0.1", 0), handler) thread = threading.Thread( target=httpd.serve_forever, kwargs={"poll_interval": 0.01} @@ -30,23 +33,23 @@ def socket_server(handler): class SingleMindedRequestHandler(SimpleHTTPRequestHandler): - def translate_path(self, path): + def translate_path(self, path: str) -> str: return test_file_path -def test_h11_as_client(): +def test_h11_as_client() -> None: with socket_server(SingleMindedRequestHandler) as httpd: with closing(socket.create_connection(httpd.server_address)) as s: c = h11.Connection(h11.CLIENT) s.sendall( - c.send( + c.send( # type: ignore[arg-type] h11.Request( method="GET", target="/foo", headers=[("Host", "localhost")] ) ) ) - s.sendall(c.send(h11.EndOfMessage())) + s.sendall(c.send(h11.EndOfMessage())) # type: ignore[arg-type] data = bytearray() while True: @@ -67,7 +70,7 @@ def test_h11_as_client(): class H11RequestHandler(socketserver.BaseRequestHandler): - def handle(self): + def handle(self) -> None: with closing(self.request) as s: c = h11.Connection(h11.SERVER) request = None @@ -82,6 +85,7 @@ def handle(self): request = event if type(event) is h11.EndOfMessage: break + assert request is not None info = json.dumps( { "method": request.method.decode("ascii"), @@ -92,12 +96,12 @@ def handle(self): }, } ) - s.sendall(c.send(h11.Response(status_code=200, headers=[]))) + s.sendall(c.send(h11.Response(status_code=200, headers=[]))) # type: ignore[arg-type] s.sendall(c.send(h11.Data(data=info.encode("ascii")))) s.sendall(c.send(h11.EndOfMessage())) -def test_h11_as_server(): +def test_h11_as_server() -> None: with socket_server(H11RequestHandler) as httpd: host, port = httpd.server_address url = "http://{}:{}/some-path".format(host, port) diff --git a/packages/h11/tests/test_connection.py b/packages/h11/tests/test_connection.py index baadec8d5..73a27b98b 100644 --- a/packages/h11/tests/test_connection.py +++ b/packages/h11/tests/test_connection.py @@ -1,13 +1,35 @@ +from typing import Any, cast, Dict, List, Optional, Tuple, Type + import pytest from .._connection import _body_framing, _keep_alive, Connection, NEED_DATA, PAUSED -from .._events import * -from .._state import * -from .._util import LocalProtocolError, RemoteProtocolError +from 
.._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import ( + CLIENT, + CLOSED, + DONE, + ERROR, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from .._util import LocalProtocolError, RemoteProtocolError, Sentinel from .helpers import ConnectionPair, get_all_events, receive_and_get -def test__keep_alive(): +def test__keep_alive() -> None: assert _keep_alive( Request(method="GET", target="/", headers=[("Host", "Example.com")]) ) @@ -26,19 +48,19 @@ def test__keep_alive(): ) ) assert not _keep_alive( - Request(method="GET", target="/", headers=[], http_version="1.0") + Request(method="GET", target="/", headers=[], http_version="1.0") # type: ignore[arg-type] ) - assert _keep_alive(Response(status_code=200, headers=[])) + assert _keep_alive(Response(status_code=200, headers=[])) # type: ignore[arg-type] assert not _keep_alive(Response(status_code=200, headers=[("Connection", "close")])) assert not _keep_alive( Response(status_code=200, headers=[("Connection", "a, b, cLOse, foo")]) ) - assert not _keep_alive(Response(status_code=200, headers=[], http_version="1.0")) + assert not _keep_alive(Response(status_code=200, headers=[], http_version="1.0")) # type: ignore[arg-type] -def test__body_framing(): - def headers(cl, te): +def test__body_framing() -> None: + def headers(cl: Optional[int], te: bool) -> List[Tuple[str, str]]: headers = [] if cl is not None: headers.append(("Content-Length", str(cl))) @@ -46,16 +68,19 @@ def headers(cl, te): headers.append(("Transfer-Encoding", "chunked")) return headers - def resp(status_code=200, cl=None, te=False): + def resp( + status_code: int = 200, cl: Optional[int] = None, te: bool = False + ) -> Response: return Response(status_code=status_code, headers=headers(cl, te)) - def req(cl=None, te=False): + def req(cl: Optional[int] = None, te: bool = False) -> Request: h = headers(cl, te) h += [("Host", "example.com")] return Request(method="GET", target="/", headers=h) # Special cases where the headers are ignored: for kwargs in [{}, {"cl": 100}, {"te": True}, {"cl": 100, "te": True}]: + kwargs = cast(Dict[str, Any], kwargs) for meth, r in [ (b"HEAD", resp(**kwargs)), (b"GET", resp(status_code=204, **kwargs)), @@ -65,21 +90,22 @@ def req(cl=None, te=False): # Transfer-encoding for kwargs in [{"te": True}, {"cl": 100, "te": True}]: - for meth, r in [(None, req(**kwargs)), (b"GET", resp(**kwargs))]: + kwargs = cast(Dict[str, Any], kwargs) + for meth, r in [(None, req(**kwargs)), (b"GET", resp(**kwargs))]: # type: ignore assert _body_framing(meth, r) == ("chunked", ()) # Content-Length - for meth, r in [(None, req(cl=100)), (b"GET", resp(cl=100))]: + for meth, r in [(None, req(cl=100)), (b"GET", resp(cl=100))]: # type: ignore assert _body_framing(meth, r) == ("content-length", (100,)) # No headers - assert _body_framing(None, req()) == ("content-length", (0,)) + assert _body_framing(None, req()) == ("content-length", (0,)) # type: ignore assert _body_framing(b"GET", resp()) == ("http/1.0", ()) -def test_Connection_basics_and_content_length(): +def test_Connection_basics_and_content_length() -> None: with pytest.raises(ValueError): - Connection("CLIENT") + Connection("CLIENT") # type: ignore p = ConnectionPair() assert p.conn[CLIENT].our_role is CLIENT @@ -109,7 +135,7 @@ def test_Connection_basics_and_content_length(): assert p.conn[CLIENT].their_http_version is None assert p.conn[SERVER].their_http_version == 
b"1.1" - data = p.send(SERVER, InformationalResponse(status_code=100, headers=[])) + data = p.send(SERVER, InformationalResponse(status_code=100, headers=[])) # type: ignore[arg-type] assert data == b"HTTP/1.1 100 \r\n\r\n" data = p.send(SERVER, Response(status_code=200, headers=[("Content-Length", "11")])) @@ -144,7 +170,7 @@ def test_Connection_basics_and_content_length(): assert conn.states == {CLIENT: DONE, SERVER: DONE} -def test_chunked(): +def test_chunked() -> None: p = ConnectionPair() p.send( @@ -175,7 +201,7 @@ def test_chunked(): assert conn.states == {CLIENT: DONE, SERVER: DONE} -def test_chunk_boundaries(): +def test_chunk_boundaries() -> None: conn = Connection(our_role=SERVER) request = ( @@ -214,14 +240,14 @@ def test_chunk_boundaries(): assert conn.next_event() == EndOfMessage() -def test_client_talking_to_http10_server(): +def test_client_talking_to_http10_server() -> None: c = Connection(CLIENT) c.send(Request(method="GET", target="/", headers=[("Host", "example.com")])) c.send(EndOfMessage()) assert c.our_state is DONE # No content-length, so Http10 framing for body assert receive_and_get(c, b"HTTP/1.0 200 OK\r\n\r\n") == [ - Response(status_code=200, headers=[], http_version="1.0", reason=b"OK") + Response(status_code=200, headers=[], http_version="1.0", reason=b"OK") # type: ignore[arg-type] ] assert c.our_state is MUST_CLOSE assert receive_and_get(c, b"12345") == [Data(data=b"12345")] @@ -230,19 +256,19 @@ def test_client_talking_to_http10_server(): assert c.their_state is CLOSED -def test_server_talking_to_http10_client(): +def test_server_talking_to_http10_client() -> None: c = Connection(SERVER) # No content-length, so no body # NB: no host header assert receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") == [ - Request(method="GET", target="/", headers=[], http_version="1.0"), + Request(method="GET", target="/", headers=[], http_version="1.0"), # type: ignore[arg-type] EndOfMessage(), ] assert c.their_state is MUST_CLOSE # We automatically Connection: close back at them assert ( - c.send(Response(status_code=200, headers=[])) + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" ) @@ -267,7 +293,7 @@ def test_server_talking_to_http10_client(): assert receive_and_get(c, b"") == [ConnectionClosed()] -def test_automatic_transfer_encoding_in_response(): +def test_automatic_transfer_encoding_in_response() -> None: # Check that in responses, the user can specify either Transfer-Encoding: # chunked or no framing at all, and in both cases we automatically select # the right option depending on whether the peer speaks HTTP/1.0 or @@ -279,6 +305,7 @@ def test_automatic_transfer_encoding_in_response(): # because if both are set then Transfer-Encoding wins [("Transfer-Encoding", "chunked"), ("Content-Length", "100")], ]: + user_headers = cast(List[Tuple[str, str]], user_headers) p = ConnectionPair() p.send( CLIENT, @@ -308,7 +335,7 @@ def test_automatic_transfer_encoding_in_response(): assert c.send(Data(data=b"12345")) == b"12345" -def test_automagic_connection_close_handling(): +def test_automagic_connection_close_handling() -> None: p = ConnectionPair() # If the user explicitly sets Connection: close, then we notice and # respect it @@ -329,7 +356,7 @@ def test_automagic_connection_close_handling(): p.send( SERVER, # no header here... 
- [Response(status_code=204, headers=[]), EndOfMessage()], + [Response(status_code=204, headers=[]), EndOfMessage()], # type: ignore[arg-type] # ...but oh look, it arrived anyway expect=[ Response(status_code=204, headers=[("connection", "close")]), @@ -340,8 +367,8 @@ def test_automagic_connection_close_handling(): assert conn.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} -def test_100_continue(): - def setup(): +def test_100_continue() -> None: + def setup() -> ConnectionPair: p = ConnectionPair() p.send( CLIENT, @@ -363,7 +390,7 @@ def setup(): # Disabled by 100 Continue p = setup() - p.send(SERVER, InformationalResponse(status_code=100, headers=[])) + p.send(SERVER, InformationalResponse(status_code=100, headers=[])) # type: ignore[arg-type] for conn in p.conns: assert not conn.client_is_waiting_for_100_continue assert not conn.they_are_waiting_for_100_continue @@ -385,7 +412,7 @@ def setup(): assert not conn.they_are_waiting_for_100_continue -def test_max_incomplete_event_size_countermeasure(): +def test_max_incomplete_event_size_countermeasure() -> None: # Infinitely long headers are definitely not okay c = Connection(SERVER) c.receive_data(b"GET / HTTP/1.0\r\nEndless: ") @@ -444,7 +471,7 @@ def test_max_incomplete_event_size_countermeasure(): # Even more data comes in, still no problem c.receive_data(b"X" * 1000) # We can respond and reuse to get the second pipelined request - c.send(Response(status_code=200, headers=[])) + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] c.send(EndOfMessage()) c.start_next_cycle() assert get_all_events(c) == [ @@ -454,20 +481,26 @@ def test_max_incomplete_event_size_countermeasure(): # But once we unpause and try to read the next message, and find that it's # incomplete and the buffer is *still* way too large, then *that's* a # problem: - c.send(Response(status_code=200, headers=[])) + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] c.send(EndOfMessage()) c.start_next_cycle() with pytest.raises(RemoteProtocolError): c.next_event() -def test_reuse_simple(): +def test_reuse_simple() -> None: p = ConnectionPair() p.send( CLIENT, [Request(method="GET", target="/", headers=[("Host", "a")]), EndOfMessage()], ) - p.send(SERVER, [Response(status_code=200, headers=[]), EndOfMessage()]) + p.send( + SERVER, + [ + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ], + ) for conn in p.conns: assert conn.states == {CLIENT: DONE, SERVER: DONE} conn.start_next_cycle() @@ -479,10 +512,16 @@ def test_reuse_simple(): EndOfMessage(), ], ) - p.send(SERVER, [Response(status_code=404, headers=[]), EndOfMessage()]) + p.send( + SERVER, + [ + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ], + ) -def test_pipelining(): +def test_pipelining() -> None: # Client doesn't support pipelining, so we have to do this by hand c = Connection(SERVER) assert c.next_event() is NEED_DATA @@ -508,7 +547,7 @@ def test_pipelining(): assert c.next_event() is PAUSED - c.send(Response(status_code=200, headers=[])) + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] c.send(EndOfMessage()) assert c.their_state is DONE assert c.our_state is DONE @@ -525,7 +564,7 @@ def test_pipelining(): EndOfMessage(), ] assert c.next_event() is PAUSED - c.send(Response(status_code=200, headers=[])) + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] c.send(EndOfMessage()) c.start_next_cycle() @@ -535,7 +574,7 @@ def test_pipelining(): 
] # Doesn't pause this time, no trailing data assert c.next_event() is NEED_DATA - c.send(Response(status_code=200, headers=[])) + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] c.send(EndOfMessage()) # Arrival of more data triggers pause @@ -554,7 +593,7 @@ def test_pipelining(): c.receive_data(b"FDSA") -def test_protocol_switch(): +def test_protocol_switch() -> None: for (req, deny, accept) in [ ( Request( @@ -562,8 +601,8 @@ def test_protocol_switch(): target="example.com:443", headers=[("Host", "foo"), ("Content-Length", "1")], ), - Response(status_code=404, headers=[]), - Response(status_code=200, headers=[]), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), ), ( Request( @@ -571,7 +610,7 @@ def test_protocol_switch(): target="/", headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], ), - Response(status_code=200, headers=[]), + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), InformationalResponse(status_code=101, headers=[("Upgrade", "a")]), ), ( @@ -580,9 +619,9 @@ def test_protocol_switch(): target="example.com:443", headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], ), - Response(status_code=404, headers=[]), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), # Accept CONNECT, not upgrade - Response(status_code=200, headers=[]), + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), ), ( Request( @@ -590,13 +629,13 @@ def test_protocol_switch(): target="example.com:443", headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], ), - Response(status_code=404, headers=[]), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), # Accept Upgrade, not CONNECT InformationalResponse(status_code=101, headers=[("Upgrade", "b")]), ), ]: - def setup(): + def setup() -> ConnectionPair: p = ConnectionPair() p.send(CLIENT, req) # No switch-related state change stuff yet; the client has to @@ -644,7 +683,7 @@ def setup(): sc.send(EndOfMessage()) sc.start_next_cycle() assert get_all_events(sc) == [ - Request(method="GET", target="/", headers=[], http_version="1.0"), + Request(method="GET", target="/", headers=[], http_version="1.0"), # type: ignore[arg-type] EndOfMessage(), ] @@ -661,7 +700,7 @@ def setup(): p = setup() sc = p.conn[SERVER] - sc.receive_data(b"") == [] + sc.receive_data(b"") assert sc.next_event() is PAUSED sc.send(deny) assert sc.next_event() == ConnectionClosed() @@ -679,12 +718,12 @@ def setup(): p.conn[SERVER].send(Data(data=b"123")) -def test_close_simple(): +def test_close_simple() -> None: # Just immediately closing a new connection without anything having # happened yet. 
for (who_shot_first, who_shot_second) in [(CLIENT, SERVER), (SERVER, CLIENT)]: - def setup(): + def setup() -> ConnectionPair: p = ConnectionPair() p.send(who_shot_first, ConnectionClosed()) for conn in p.conns: @@ -720,12 +759,15 @@ def setup(): p.conn[who_shot_first].next_event() -def test_close_different_states(): +def test_close_different_states() -> None: req = [ Request(method="GET", target="/foo", headers=[("Host", "a")]), EndOfMessage(), ] - resp = [Response(status_code=200, headers=[]), EndOfMessage()] + resp = [ + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ] # Client before request p = ConnectionPair() @@ -783,7 +825,7 @@ def test_close_different_states(): # Receive several requests and then client shuts down their side of the # connection; we can respond to each -def test_pipelined_close(): +def test_pipelined_close() -> None: c = Connection(SERVER) # 2 requests then a close c.receive_data( @@ -803,7 +845,7 @@ def test_pipelined_close(): EndOfMessage(), ] assert c.states[CLIENT] is DONE - c.send(Response(status_code=200, headers=[])) + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] c.send(EndOfMessage()) assert c.states[SERVER] is DONE c.start_next_cycle() @@ -818,21 +860,23 @@ def test_pipelined_close(): ConnectionClosed(), ] assert c.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} - c.send(Response(status_code=200, headers=[])) + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] c.send(EndOfMessage()) assert c.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} c.send(ConnectionClosed()) assert c.states == {CLIENT: CLOSED, SERVER: CLOSED} -def test_sendfile(): +def test_sendfile() -> None: class SendfilePlaceholder: - def __len__(self): + def __len__(self) -> int: return 10 placeholder = SendfilePlaceholder() - def setup(header, http_version): + def setup( + header: Tuple[str, str], http_version: str + ) -> Tuple[Connection, Optional[List[bytes]]]: c = Connection(SERVER) receive_and_get( c, "GET / HTTP/{}\r\nHost: a\r\n\r\n".format(http_version).encode("ascii") @@ -841,25 +885,25 @@ def setup(header, http_version): if header: headers.append(header) c.send(Response(status_code=200, headers=headers)) - return c, c.send_with_data_passthrough(Data(data=placeholder)) + return c, c.send_with_data_passthrough(Data(data=placeholder)) # type: ignore c, data = setup(("Content-Length", "10"), "1.1") - assert data == [placeholder] + assert data == [placeholder] # type: ignore # Raises an error if the connection object doesn't think we've sent # exactly 10 bytes c.send(EndOfMessage()) _, data = setup(("Transfer-Encoding", "chunked"), "1.1") - assert placeholder in data - data[data.index(placeholder)] = b"x" * 10 - assert b"".join(data) == b"a\r\nxxxxxxxxxx\r\n" + assert placeholder in data # type: ignore + data[data.index(placeholder)] = b"x" * 10 # type: ignore + assert b"".join(data) == b"a\r\nxxxxxxxxxx\r\n" # type: ignore - c, data = setup(None, "1.0") - assert data == [placeholder] + c, data = setup(None, "1.0") # type: ignore + assert data == [placeholder] # type: ignore assert c.our_state is SEND_BODY -def test_errors(): +def test_errors() -> None: # After a receive error, you can't receive for role in [CLIENT, SERVER]: c = Connection(our_role=role) @@ -875,14 +919,14 @@ def test_errors(): # But we can still yell at the client for sending us gibberish if role is SERVER: assert ( - c.send(Response(status_code=400, headers=[])) + c.send(Response(status_code=400, headers=[])) # type: 
ignore[arg-type] == b"HTTP/1.1 400 \r\nConnection: close\r\n\r\n" ) # After an error sending, you can no longer send # (This is especially important for things like content-length errors, # where there's complex internal state being modified) - def conn(role): + def conn(role: Type[Sentinel]) -> Connection: c = Connection(our_role=role) if role is SERVER: # Put it into the state where it *could* send a response... @@ -902,8 +946,8 @@ def conn(role): http_version="1.0", ) elif role is SERVER: - good = Response(status_code=200, headers=[]) - bad = Response(status_code=200, headers=[], http_version="1.0") + good = Response(status_code=200, headers=[]) # type: ignore[arg-type,assignment] + bad = Response(status_code=200, headers=[], http_version="1.0") # type: ignore[arg-type,assignment] # Make sure 'good' actually is good c = conn(role) c.send(good) @@ -929,14 +973,14 @@ def conn(role): assert c.their_state is not ERROR -def test_idle_receive_nothing(): +def test_idle_receive_nothing() -> None: # At one point this incorrectly raised an error for role in [CLIENT, SERVER]: c = Connection(role) assert c.next_event() is NEED_DATA -def test_connection_drop(): +def test_connection_drop() -> None: c = Connection(SERVER) c.receive_data(b"GET /") assert c.next_event() is NEED_DATA @@ -945,15 +989,15 @@ def test_connection_drop(): c.next_event() -def test_408_request_timeout(): +def test_408_request_timeout() -> None: # Should be able to send this spontaneously as a server without seeing # anything from client p = ConnectionPair() - p.send(SERVER, Response(status_code=408, headers=[])) + p.send(SERVER, Response(status_code=408, headers=[(b"connection", b"close")])) # This used to raise IndexError -def test_empty_request(): +def test_empty_request() -> None: c = Connection(SERVER) c.receive_data(b"\r\n") with pytest.raises(RemoteProtocolError): @@ -961,7 +1005,7 @@ def test_empty_request(): # This used to raise IndexError -def test_empty_response(): +def test_empty_response() -> None: c = Connection(CLIENT) c.send(Request(method="GET", target="/", headers=[("Host", "a")])) c.receive_data(b"\r\n") @@ -977,7 +1021,7 @@ def test_empty_response(): b"\x16\x03\x01\x00\xa5", # Typical start of a TLS Client Hello ], ) -def test_early_detection_of_invalid_request(data): +def test_early_detection_of_invalid_request(data: bytes) -> None: c = Connection(SERVER) # Early detection should occur before even receiving a `\r\n` c.receive_data(data) @@ -993,7 +1037,7 @@ def test_early_detection_of_invalid_request(data): b"\x16\x03\x03\x00\x31", # Typical start of a TLS Server Hello ], ) -def test_early_detection_of_invalid_response(data): +def test_early_detection_of_invalid_response(data: bytes) -> None: c = Connection(CLIENT) # Early detection should occur before even receiving a `\r\n` c.receive_data(data) @@ -1005,8 +1049,8 @@ def test_early_detection_of_invalid_response(data): # The correct way to handle HEAD is to put whatever headers we *would* have # put if it were a GET -- even though we know that for HEAD, those headers # will be ignored. 
-def test_HEAD_framing_headers(): - def setup(method, http_version): +def test_HEAD_framing_headers() -> None: + def setup(method: bytes, http_version: bytes) -> Connection: c = Connection(SERVER) c.receive_data( method + b" / HTTP/" + http_version + b"\r\n" + b"Host: example.com\r\n\r\n" @@ -1019,14 +1063,14 @@ def setup(method, http_version): # No Content-Length, HTTP/1.1 peer, should use chunked c = setup(method, b"1.1") assert ( - c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type] b"Transfer-Encoding: chunked\r\n\r\n" ) # No Content-Length, HTTP/1.0 peer, frame with connection: close c = setup(method, b"1.0") assert ( - c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type] b"Connection: close\r\n\r\n" ) @@ -1047,7 +1091,7 @@ def setup(method, http_version): ) -def test_special_exceptions_for_lost_connection_in_message_body(): +def test_special_exceptions_for_lost_connection_in_message_body() -> None: c = Connection(SERVER) c.receive_data( b"POST / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 100\r\n\r\n" @@ -1071,7 +1115,7 @@ def test_special_exceptions_for_lost_connection_in_message_body(): assert type(c.next_event()) is Request assert c.next_event() is NEED_DATA c.receive_data(b"8\r\n012345") - assert c.next_event().data == b"012345" + assert c.next_event().data == b"012345" # type: ignore c.receive_data(b"") with pytest.raises(RemoteProtocolError) as excinfo: c.next_event() diff --git a/packages/h11/tests/test_events.py b/packages/h11/tests/test_events.py index e20f741c5..bc6c31370 100644 --- a/packages/h11/tests/test_events.py +++ b/packages/h11/tests/test_events.py @@ -3,57 +3,19 @@ import pytest from .. 
import _events -from .._events import * +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) from .._util import LocalProtocolError -def test_event_bundle(): - class T(_events._EventBundle): - _fields = ["a", "b"] - _defaults = {"b": 1} - - def _validate(self): - if self.a == 0: - raise ValueError - - # basic construction and methods - t = T(a=1, b=0) - assert repr(t) == "T(a=1, b=0)" - assert t == T(a=1, b=0) - assert not (t == T(a=2, b=0)) - assert not (t != T(a=1, b=0)) - assert t != T(a=2, b=0) - with pytest.raises(TypeError): - hash(t) - - # check defaults - t = T(a=10) - assert t.a == 10 - assert t.b == 1 - - # no positional args - with pytest.raises(TypeError): - T(1) - - with pytest.raises(TypeError): - T(1, a=1, b=0) - - # unknown field - with pytest.raises(TypeError): - T(a=1, b=0, c=10) - - # missing required field - with pytest.raises(TypeError) as exc: - T(b=0) - # make sure we error on the right missing kwarg - assert "kwarg a" in str(exc.value) - - # _validate is called - with pytest.raises(ValueError): - T(a=0, b=0) - - -def test_events(): +def test_events() -> None: with pytest.raises(LocalProtocolError): # Missing Host: req = Request( @@ -114,14 +76,23 @@ def test_events(): ) # Request target is validated - for bad_char in b"\x00\x20\x7f\xee": + for bad_byte in b"\x00\x20\x7f\xee": target = bytearray(b"/") - target.append(bad_char) + target.append(bad_byte) with pytest.raises(LocalProtocolError): Request( method="GET", target=target, headers=[("Host", "a")], http_version="1.1" ) + # Request method is validated + with pytest.raises(LocalProtocolError): + Request( + method="GET / HTTP/1.1", + target=target, + headers=[("Host", "a")], + http_version="1.1", + ) + ir = InformationalResponse(status_code=100, headers=[("Host", "a")]) assert ir.status_code == 100 assert ir.headers == [(b"host", b"a")] @@ -130,19 +101,19 @@ def test_events(): with pytest.raises(LocalProtocolError): InformationalResponse(status_code=200, headers=[("Host", "a")]) - resp = Response(status_code=204, headers=[], http_version="1.0") + resp = Response(status_code=204, headers=[], http_version="1.0") # type: ignore[arg-type] assert resp.status_code == 204 assert resp.headers == [] assert resp.http_version == b"1.0" with pytest.raises(LocalProtocolError): - resp = Response(status_code=100, headers=[], http_version="1.0") + resp = Response(status_code=100, headers=[], http_version="1.0") # type: ignore[arg-type] with pytest.raises(LocalProtocolError): - Response(status_code="100", headers=[], http_version="1.0") + Response(status_code="100", headers=[], http_version="1.0") # type: ignore[arg-type] with pytest.raises(LocalProtocolError): - InformationalResponse(status_code=b"100", headers=[], http_version="1.0") + InformationalResponse(status_code=b"100", headers=[], http_version="1.0") # type: ignore[arg-type] d = Data(data=b"asdf") assert d.data == b"asdf" @@ -154,16 +125,16 @@ def test_events(): assert repr(cc) == "ConnectionClosed()" -def test_intenum_status_code(): +def test_intenum_status_code() -> None: # https://github.com/python-hyper/h11/issues/72 - r = Response(status_code=HTTPStatus.OK, headers=[], http_version="1.0") + r = Response(status_code=HTTPStatus.OK, headers=[], http_version="1.0") # type: ignore[arg-type] assert r.status_code == HTTPStatus.OK assert type(r.status_code) is not type(HTTPStatus.OK) assert type(r.status_code) is int -def test_header_casing(): +def test_header_casing() -> None: r = Request( 
method="GET", target="/", diff --git a/packages/h11/tests/test_headers.py b/packages/h11/tests/test_headers.py index ff3dc8d75..ba53d088f 100644 --- a/packages/h11/tests/test_headers.py +++ b/packages/h11/tests/test_headers.py @@ -1,9 +1,17 @@ import pytest -from .._headers import * - - -def test_normalize_and_validate(): +from .._events import Request +from .._headers import ( + get_comma_header, + has_expect_100_continue, + Headers, + normalize_and_validate, + set_comma_header, +) +from .._util import LocalProtocolError + + +def test_normalize_and_validate() -> None: assert normalize_and_validate([("foo", "bar")]) == [(b"foo", b"bar")] assert normalize_and_validate([(b"foo", b"bar")]) == [(b"foo", b"bar")] @@ -84,7 +92,7 @@ def test_normalize_and_validate(): assert excinfo.value.error_status_hint == 501 # Not Implemented -def test_get_set_comma_header(): +def test_get_set_comma_header() -> None: headers = normalize_and_validate( [ ("Connection", "close"), @@ -95,10 +103,10 @@ def test_get_set_comma_header(): assert get_comma_header(headers, b"connection") == [b"close", b"foo", b"bar"] - headers = set_comma_header(headers, b"newthing", ["a", "b"]) + headers = set_comma_header(headers, b"newthing", ["a", "b"]) # type: ignore with pytest.raises(LocalProtocolError): - set_comma_header(headers, b"newthing", [" a", "b"]) + set_comma_header(headers, b"newthing", [" a", "b"]) # type: ignore assert headers == [ (b"connection", b"close"), @@ -108,7 +116,7 @@ def test_get_set_comma_header(): (b"newthing", b"b"), ] - headers = set_comma_header(headers, b"whatever", ["different thing"]) + headers = set_comma_header(headers, b"whatever", ["different thing"]) # type: ignore assert headers == [ (b"connection", b"close"), @@ -119,9 +127,7 @@ def test_get_set_comma_header(): ] -def test_has_100_continue(): - from .._events import Request - +def test_has_100_continue() -> None: assert has_expect_100_continue( Request( method="GET", diff --git a/packages/h11/tests/test_helpers.py b/packages/h11/tests/test_helpers.py index 1477947af..c329c7678 100644 --- a/packages/h11/tests/test_helpers.py +++ b/packages/h11/tests/test_helpers.py @@ -1,12 +1,21 @@ -from .helpers import * +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .helpers import normalize_data_events -def test_normalize_data_events(): +def test_normalize_data_events() -> None: assert normalize_data_events( [ Data(data=bytearray(b"1")), Data(data=b"2"), - Response(status_code=200, headers=[]), + Response(status_code=200, headers=[]), # type: ignore[arg-type] Data(data=b"3"), Data(data=b"4"), EndOfMessage(), @@ -16,7 +25,7 @@ def test_normalize_data_events(): ] ) == [ Data(data=b"12"), - Response(status_code=200, headers=[]), + Response(status_code=200, headers=[]), # type: ignore[arg-type] Data(data=b"34"), EndOfMessage(), Data(data=b"567"), diff --git a/packages/h11/tests/test_io.py b/packages/h11/tests/test_io.py index 459a627d2..2b47c0eac 100644 --- a/packages/h11/tests/test_io.py +++ b/packages/h11/tests/test_io.py @@ -1,6 +1,16 @@ +from typing import Any, Callable, Generator, List + import pytest -from .._events import * +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) from .._headers import Headers, normalize_and_validate from .._readers import ( _obsolete_line_fold, @@ -10,7 +20,18 @@ READERS, ) from .._receivebuffer import ReceiveBuffer -from .._state import * +from .._state import 
( + CLIENT, + CLOSED, + DONE, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) from .._util import LocalProtocolError from .._writers import ( ChunkedWriter, @@ -40,7 +61,7 @@ ), ( (SERVER, SEND_RESPONSE), - Response(status_code=200, headers=[], reason=b"OK"), + Response(status_code=200, headers=[], reason=b"OK"), # type: ignore[arg-type] b"HTTP/1.1 200 OK\r\n\r\n", ), ( @@ -52,36 +73,35 @@ ), ( (SERVER, SEND_RESPONSE), - InformationalResponse(status_code=101, headers=[], reason=b"Upgrade"), + InformationalResponse(status_code=101, headers=[], reason=b"Upgrade"), # type: ignore[arg-type] b"HTTP/1.1 101 Upgrade\r\n\r\n", ), ] -def dowrite(writer, obj): - got_list = [] +def dowrite(writer: Callable[..., None], obj: Any) -> bytes: + got_list: List[bytes] = [] writer(obj, got_list.append) return b"".join(got_list) -def tw(writer, obj, expected): +def tw(writer: Any, obj: Any, expected: Any) -> None: got = dowrite(writer, obj) assert got == expected -def makebuf(data): +def makebuf(data: bytes) -> ReceiveBuffer: buf = ReceiveBuffer() buf += data return buf -def tr(reader, data, expected): - def check(got): +def tr(reader: Any, data: bytes, expected: Any) -> None: + def check(got: Any) -> None: assert got == expected # Headers should always be returned as bytes, not e.g. bytearray # https://github.com/python-hyper/wsproto/pull/54#issuecomment-377709478 for name, value in getattr(got, "headers", []): - print(name, value) assert type(name) is bytes assert type(value) is bytes @@ -104,17 +124,17 @@ def check(got): assert bytes(buf) == b"trailing" -def test_writers_simple(): +def test_writers_simple() -> None: for ((role, state), event, binary) in SIMPLE_CASES: tw(WRITERS[role, state], event, binary) -def test_readers_simple(): +def test_readers_simple() -> None: for ((role, state), event, binary) in SIMPLE_CASES: tr(READERS[role, state], binary, event) -def test_writers_unusual(): +def test_writers_unusual() -> None: # Simple test of the write_headers utility routine tw( write_headers, @@ -145,7 +165,7 @@ def test_writers_unusual(): ) -def test_readers_unusual(): +def test_readers_unusual() -> None: # Reading HTTP/1.0 tr( READERS[CLIENT, IDLE], @@ -162,7 +182,7 @@ def test_readers_unusual(): tr( READERS[CLIENT, IDLE], b"HEAD /foo HTTP/1.0\r\n\r\n", - Request(method="HEAD", target="/foo", headers=[], http_version="1.0"), + Request(method="HEAD", target="/foo", headers=[], http_version="1.0"), # type: ignore[arg-type] ) tr( @@ -305,7 +325,7 @@ def test_readers_unusual(): tr(READERS[CLIENT, IDLE], b"HEAD /foo HTTP/1.1\r\n" b": line\r\n\r\n", None) -def test__obsolete_line_fold_bytes(): +def test__obsolete_line_fold_bytes() -> None: # _obsolete_line_fold has a defensive cast to bytearray, which is # necessary to protect against O(n^2) behavior in case anyone ever passes # in regular bytestrings... 
but right now we never pass in regular @@ -318,7 +338,9 @@ def test__obsolete_line_fold_bytes(): ] -def _run_reader_iter(reader, buf, do_eof): +def _run_reader_iter( + reader: Any, buf: bytes, do_eof: bool +) -> Generator[Any, None, None]: while True: event = reader(buf) if event is None: @@ -333,12 +355,12 @@ def _run_reader_iter(reader, buf, do_eof): yield reader.read_eof() -def _run_reader(*args): +def _run_reader(*args: Any) -> List[Event]: events = list(_run_reader_iter(*args)) return normalize_data_events(events) -def t_body_reader(thunk, data, expected, do_eof=False): +def t_body_reader(thunk: Any, data: bytes, expected: Any, do_eof: bool = False) -> None: # Simple: consume whole thing print("Test 1") buf = makebuf(data) @@ -361,7 +383,7 @@ def t_body_reader(thunk, data, expected, do_eof=False): assert _run_reader(thunk(), buf, False) == expected -def test_ContentLengthReader(): +def test_ContentLengthReader() -> None: t_body_reader(lambda: ContentLengthReader(0), b"", [EndOfMessage()]) t_body_reader( @@ -371,7 +393,7 @@ def test_ContentLengthReader(): ) -def test_Http10Reader(): +def test_Http10Reader() -> None: t_body_reader(Http10Reader, b"", [EndOfMessage()], do_eof=True) t_body_reader(Http10Reader, b"asdf", [Data(data=b"asdf")], do_eof=False) t_body_reader( @@ -379,7 +401,7 @@ def test_Http10Reader(): ) -def test_ChunkedReader(): +def test_ChunkedReader() -> None: t_body_reader(ChunkedReader, b"0\r\n\r\n", [EndOfMessage()]) t_body_reader( @@ -433,8 +455,14 @@ def test_ChunkedReader(): [Data(data=b"xxxxx"), EndOfMessage()], ) + t_body_reader( + ChunkedReader, + b"5 \r\n01234\r\n" + b"0\r\n\r\n", + [Data(data=b"01234"), EndOfMessage()], + ) + -def test_ContentLengthWriter(): +def test_ContentLengthWriter() -> None: w = ContentLengthWriter(5) assert dowrite(w, Data(data=b"123")) == b"123" assert dowrite(w, Data(data=b"45")) == b"45" @@ -461,7 +489,7 @@ def test_ContentLengthWriter(): dowrite(w, EndOfMessage(headers=[("Etag", "asdf")])) -def test_ChunkedWriter(): +def test_ChunkedWriter() -> None: w = ChunkedWriter() assert dowrite(w, Data(data=b"aaa")) == b"3\r\naaa\r\n" assert dowrite(w, Data(data=b"a" * 20)) == b"14\r\n" + b"a" * 20 + b"\r\n" @@ -476,7 +504,7 @@ def test_ChunkedWriter(): ) -def test_Http10Writer(): +def test_Http10Writer() -> None: w = Http10Writer() assert dowrite(w, Data(data=b"1234")) == b"1234" assert dowrite(w, EndOfMessage()) == b"" @@ -485,12 +513,12 @@ def test_Http10Writer(): dowrite(w, EndOfMessage(headers=[("Etag", "asdf")])) -def test_reject_garbage_after_request_line(): +def test_reject_garbage_after_request_line() -> None: with pytest.raises(LocalProtocolError): tr(READERS[SERVER, SEND_RESPONSE], b"HTTP/1.0 200 OK\x00xxxx\r\n\r\n", None) -def test_reject_garbage_after_response_line(): +def test_reject_garbage_after_response_line() -> None: with pytest.raises(LocalProtocolError): tr( READERS[CLIENT, IDLE], @@ -499,7 +527,7 @@ def test_reject_garbage_after_response_line(): ) -def test_reject_garbage_in_header_line(): +def test_reject_garbage_in_header_line() -> None: with pytest.raises(LocalProtocolError): tr( READERS[CLIENT, IDLE], @@ -508,7 +536,7 @@ def test_reject_garbage_in_header_line(): ) -def test_reject_non_vchar_in_path(): +def test_reject_non_vchar_in_path() -> None: for bad_char in b"\x00\x20\x7f\xee": message = bytearray(b"HEAD /") message.append(bad_char) @@ -518,7 +546,7 @@ def test_reject_non_vchar_in_path(): # https://github.com/python-hyper/h11/issues/57 -def test_allow_some_garbage_in_cookies(): +def 
test_allow_some_garbage_in_cookies() -> None: tr( READERS[CLIENT, IDLE], b"HEAD /foo HTTP/1.1\r\n" @@ -536,7 +564,7 @@ def test_allow_some_garbage_in_cookies(): ) -def test_host_comes_first(): +def test_host_comes_first() -> None: tw( write_headers, normalize_and_validate([("foo", "bar"), ("Host", "example.com")]), diff --git a/packages/h11/tests/test_receivebuffer.py b/packages/h11/tests/test_receivebuffer.py index 3a61f9dc5..21a3870b6 100644 --- a/packages/h11/tests/test_receivebuffer.py +++ b/packages/h11/tests/test_receivebuffer.py @@ -1,11 +1,12 @@ import re +from typing import Tuple import pytest from .._receivebuffer import ReceiveBuffer -def test_receivebuffer(): +def test_receivebuffer() -> None: b = ReceiveBuffer() assert not b assert len(b) == 0 @@ -118,7 +119,7 @@ def test_receivebuffer(): ), ], ) -def test_receivebuffer_for_invalid_delimiter(data): +def test_receivebuffer_for_invalid_delimiter(data: Tuple[bytes]) -> None: b = ReceiveBuffer() for line in data: diff --git a/packages/h11/tests/test_state.py b/packages/h11/tests/test_state.py index efe83f0ad..bc974e636 100644 --- a/packages/h11/tests/test_state.py +++ b/packages/h11/tests/test_state.py @@ -1,12 +1,33 @@ import pytest -from .._events import * -from .._state import * -from .._state import _SWITCH_CONNECT, _SWITCH_UPGRADE, ConnectionState +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import ( + _SWITCH_CONNECT, + _SWITCH_UPGRADE, + CLIENT, + CLOSED, + ConnectionState, + DONE, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) from .._util import LocalProtocolError -def test_ConnectionState(): +def test_ConnectionState() -> None: cs = ConnectionState() # Basic event-triggered transitions @@ -38,7 +59,7 @@ def test_ConnectionState(): assert cs.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} -def test_ConnectionState_keep_alive(): +def test_ConnectionState_keep_alive() -> None: # keep_alive = False cs = ConnectionState() cs.process_event(CLIENT, Request) @@ -51,7 +72,7 @@ def test_ConnectionState_keep_alive(): assert cs.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} -def test_ConnectionState_keep_alive_in_DONE(): +def test_ConnectionState_keep_alive_in_DONE() -> None: # Check that if keep_alive is disabled when the CLIENT is already in DONE, # then this is sufficient to immediately trigger the DONE -> MUST_CLOSE # transition @@ -63,7 +84,7 @@ def test_ConnectionState_keep_alive_in_DONE(): assert cs.states[CLIENT] is MUST_CLOSE -def test_ConnectionState_switch_denied(): +def test_ConnectionState_switch_denied() -> None: for switch_type in (_SWITCH_CONNECT, _SWITCH_UPGRADE): for deny_early in (True, False): cs = ConnectionState() @@ -107,7 +128,7 @@ def test_ConnectionState_switch_denied(): } -def test_ConnectionState_protocol_switch_accepted(): +def test_ConnectionState_protocol_switch_accepted() -> None: for switch_event in [_SWITCH_UPGRADE, _SWITCH_CONNECT]: cs = ConnectionState() cs.process_client_switch_proposal(switch_event) @@ -125,7 +146,7 @@ def test_ConnectionState_protocol_switch_accepted(): assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} -def test_ConnectionState_double_protocol_switch(): +def test_ConnectionState_double_protocol_switch() -> None: # CONNECT + Upgrade is legal! Very silly, but legal. So we support # it. Because sometimes doing the silly thing is easier than not. 
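The double-switch test below builds on the simpler accepted-switch flow. As a standalone sketch of that flow, using the same private `_state` and `_events` modules these tests import (event *types*, not instances, are passed to `process_event`):

```
from h11._events import EndOfMessage, Request, Response
from h11._state import (
    _SWITCH_CONNECT,
    CLIENT,
    SERVER,
    SWITCHED_PROTOCOL,
    ConnectionState,
)

cs = ConnectionState()
cs.process_client_switch_proposal(_SWITCH_CONNECT)
cs.process_event(CLIENT, Request)
cs.process_event(CLIENT, EndOfMessage)
# The client now waits in MIGHT_SWITCH_PROTOCOL until the server answers;
# a 2xx Response carrying the switch event moves both sides over.
cs.process_event(SERVER, Response, _SWITCH_CONNECT)
assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL}
```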
for server_switch in [None, _SWITCH_UPGRADE, _SWITCH_CONNECT]: @@ -144,7 +165,7 @@ def test_ConnectionState_double_protocol_switch(): assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} -def test_ConnectionState_inconsistent_protocol_switch(): +def test_ConnectionState_inconsistent_protocol_switch() -> None: for client_switches, server_switch in [ ([], _SWITCH_CONNECT), ([], _SWITCH_UPGRADE), @@ -152,14 +173,14 @@ def test_ConnectionState_inconsistent_protocol_switch(): ([_SWITCH_CONNECT], _SWITCH_UPGRADE), ]: cs = ConnectionState() - for client_switch in client_switches: + for client_switch in client_switches: # type: ignore[attr-defined] cs.process_client_switch_proposal(client_switch) cs.process_event(CLIENT, Request) with pytest.raises(LocalProtocolError): cs.process_event(SERVER, Response, server_switch) -def test_ConnectionState_keepalive_protocol_switch_interaction(): +def test_ConnectionState_keepalive_protocol_switch_interaction() -> None: # keep_alive=False + pending_switch_proposals cs = ConnectionState() cs.process_client_switch_proposal(_SWITCH_UPGRADE) @@ -177,7 +198,7 @@ def test_ConnectionState_keepalive_protocol_switch_interaction(): assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_BODY} -def test_ConnectionState_reuse(): +def test_ConnectionState_reuse() -> None: cs = ConnectionState() with pytest.raises(LocalProtocolError): @@ -242,7 +263,7 @@ def test_ConnectionState_reuse(): assert cs.states == {CLIENT: IDLE, SERVER: IDLE} -def test_server_request_is_illegal(): +def test_server_request_is_illegal() -> None: # There used to be a bug in how we handled the Request special case that # made this allowed... cs = ConnectionState() diff --git a/packages/h11/tests/test_util.py b/packages/h11/tests/test_util.py index d851bdcb6..79bc09518 100644 --- a/packages/h11/tests/test_util.py +++ b/packages/h11/tests/test_util.py @@ -1,18 +1,26 @@ import re import sys import traceback +from typing import NoReturn import pytest -from .._util import * +from .._util import ( + bytesify, + LocalProtocolError, + ProtocolError, + RemoteProtocolError, + Sentinel, + validate, +) -def test_ProtocolError(): +def test_ProtocolError() -> None: with pytest.raises(TypeError): ProtocolError("abstract base class") -def test_LocalProtocolError(): +def test_LocalProtocolError() -> None: try: raise LocalProtocolError("foo") except LocalProtocolError as e: @@ -25,7 +33,7 @@ def test_LocalProtocolError(): assert str(e) == "foo" assert e.error_status_hint == 418 - def thunk(): + def thunk() -> NoReturn: raise LocalProtocolError("a", error_status_hint=420) try: @@ -42,8 +50,8 @@ def thunk(): assert new_traceback.endswith(orig_traceback) -def test_validate(): - my_re = re.compile(br"(?P[0-9]+)\.(?P[0-9]+)") +def test_validate() -> None: + my_re = re.compile(rb"(?P[0-9]+)\.(?P[0-9]+)") with pytest.raises(LocalProtocolError): validate(my_re, b"0.") @@ -57,8 +65,8 @@ def test_validate(): validate(my_re, b"0.1\n") -def test_validate_formatting(): - my_re = re.compile(br"foo") +def test_validate_formatting() -> None: + my_re = re.compile(rb"foo") with pytest.raises(LocalProtocolError) as excinfo: validate(my_re, b"", "oops") @@ -73,21 +81,26 @@ def test_validate_formatting(): assert "oops 10 xx" in str(excinfo.value) -def test_make_sentinel(): - S = make_sentinel("S") +def test_make_sentinel() -> None: + class S(Sentinel, metaclass=Sentinel): + pass + assert repr(S) == "S" assert S == S assert type(S).__name__ == "S" assert S in {S} assert type(S) is S - S2 = make_sentinel("S2") + + class 
S2(Sentinel, metaclass=Sentinel): + pass + assert repr(S2) == "S2" assert S != S2 assert S not in {S2} assert type(S) is not type(S2) -def test_bytesify(): +def test_bytesify() -> None: assert bytesify(b"123") == b"123" assert bytesify(bytearray(b"123")) == b"123" assert bytesify("123") == b"123" diff --git a/packages/h2/__init__.py b/packages/h2/__init__.py index 6d9e28e51..d3fd0cb67 100644 --- a/packages/h2/__init__.py +++ b/packages/h2/__init__.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- """ -hyper-h2 +h2 ~~ A HTTP/2 implementation. """ -__version__ = '4.0.0' +__version__ = '4.1.0' diff --git a/packages/h2/config.py b/packages/h2/config.py index 730b61124..915df55c1 100644 --- a/packages/h2/config.py +++ b/packages/h2/config.py @@ -6,6 +6,8 @@ Objects for controlling the configuration of the HTTP/2 stack. """ +import sys + class _BooleanConfigOption: """ @@ -27,10 +29,10 @@ def __set__(self, instance, value): class DummyLogger: """ - An Logger object that does not actual logging, hence a DummyLogger. + A Logger object that does not actual logging, hence a DummyLogger. For the class the log operation is merely a no-op. The intent is to avoid - conditionals being sprinkled throughout the hyper-h2 code for calls to + conditionals being sprinkled throughout the h2 code for calls to logging functions when no logger is passed into the corresponding object. """ def __init__(self, *vargs): @@ -49,6 +51,29 @@ def trace(self, *vargs, **kwargs): pass +class OutputLogger: + """ + A Logger object that prints to stderr or any other file-like object. + + This class is provided for convenience and not part of the stable API. + + :param file: A file-like object passed to the print function. + Defaults to ``sys.stderr``. + :param trace: Enables trace-level output. Defaults to ``False``. + """ + def __init__(self, file=None, trace_level=False): + super().__init__() + self.file = file or sys.stderr + self.trace_level = trace_level + + def debug(self, fmtstr, *args): + print(f"h2 (debug): {fmtstr % args}", file=self.file) + + def trace(self, fmtstr, *args): + if self.trace_level: + print(f"h2 (trace): {fmtstr % args}", file=self.file) + + class H2Configuration: """ An object that controls the way a single HTTP/2 connection behaves. @@ -101,7 +126,7 @@ class H2Configuration: :param normalize_inbound_headers: Controls whether the headers received by this object are normalized according to the rules of RFC 7540. - Disabling this setting may lead to hyper-h2 emitting header blocks that + Disabling this setting may lead to h2 emitting header blocks that some RFCs forbid, e.g. with multiple cookie fields. .. versionadded:: 3.0.0 diff --git a/packages/h2/connection.py b/packages/h2/connection.py index aa3071144..25251e20a 100644 --- a/packages/h2/connection.py +++ b/packages/h2/connection.py @@ -806,8 +806,8 @@ def send_data(self, stream_id, data, end_stream=False, pad_length=None): :class:`FrameTooLargeError ` will be raised. - Hyper-h2 does this to avoid buffering the data internally. If the user - has more data to send than hyper-h2 will allow, consider breaking it up + h2 does this to avoid buffering the data internally. If the user + has more data to send than h2 will allow, consider breaking it up and buffering it externally. :param stream_id: The ID of the stream on which to send the data. @@ -1097,10 +1097,10 @@ def advertise_alternative_service(self, The explicit method of advertising can be done as long as the connection is active. 
The implicit method can only be done after the client has sent the request headers and before the server has sent the - response headers: outside of those points, Hyper-h2 will forbid sending + response headers: outside of those points, h2 will forbid sending the Alternative Service advertisement by raising a ProtocolError. - The ``field_value`` parameter is specified in RFC 7838. Hyper-h2 does + The ``field_value`` parameter is specified in RFC 7838. h2 does not validate or introspect this argument: the user is required to ensure that it's well-formed. ``field_value`` corresponds to RFC 7838's "Alternative Service Field Value". @@ -1109,13 +1109,13 @@ def advertise_alternative_service(self, advertising Alternative Services. The implicit method of advertising Alternative Services has a number of subtleties and can lead to inconsistencies between the server and - client. Hyper-h2 allows both mechanisms, but caution is + client. h2 allows both mechanisms, but caution is strongly advised. .. versionadded:: 2.3.0 :param field_value: The RFC 7838 Alternative Service Field Value. This - argument is not introspected by Hyper-h2: the user is responsible + argument is not introspected by h2: the user is responsible for ensuring that it is well-formed. :type field_value: ``bytes`` @@ -1173,17 +1173,17 @@ def prioritize(self, stream_id, weight=None, depends_on=None, stream is closed. .. warning:: RFC 7540 allows for servers to change the priority of - streams. However, hyper-h2 **does not** allow server + streams. However, h2 **does not** allow server stacks to do this. This is because most clients do not adequately know how to respond when provided conflicting priority information, and relatively little utility is provided by making that functionality available. - .. note:: hyper-h2 **does not** maintain any information about the - RFC 7540 priority tree. That means that hyper-h2 does not + .. note:: h2 **does not** maintain any information about the + RFC 7540 priority tree. That means that h2 does not prevent incautious users from creating invalid priority trees, particularly by creating priority loops. While some - basic error checking is provided by hyper-h2, users are + basic error checking is provided by h2, users are strongly recommended to understand their prioritisation strategies before using the priority tools here. @@ -1481,6 +1481,7 @@ def _receive_frame(self, frame): .. versionchanged:: 2.0.0 Removed from the public API. """ + self.config.logger.trace("Received frame: %s", repr(frame)) try: # I don't love using __class__ here, maybe reconsider it. frames, events = self._frame_dispatch_table[frame.__class__](frame) @@ -2028,9 +2029,9 @@ def _add_frame_priority(frame, weight=None, depends_on=None, exclusive=None): def _decode_headers(decoder, encoded_header_block): """ Decode a HPACK-encoded header block, translating HPACK exceptions into - sensible hyper-h2 errors. + sensible h2 errors. - This only ever returns bytestring headers: hyper-h2 may emit them as + This only ever returns bytestring headers: h2 may emit them as unicode later, but internally it processes them as bytestrings only. """ try: diff --git a/packages/h2/events.py b/packages/h2/events.py index 08b318671..66c3cff4a 100644 --- a/packages/h2/events.py +++ b/packages/h2/events.py @@ -311,7 +311,7 @@ class RemoteSettingsChanged(Event): its settings. It contains a complete inventory of changed settings, including their previous values. - In HTTP/2, settings changes need to be acknowledged. 
hyper-h2 automatically + In HTTP/2, settings changes need to be acknowledged. h2 automatically acknowledges settings changes for efficiency. However, it is possible that the caller may not be happy with the changed setting. @@ -322,7 +322,7 @@ class RemoteSettingsChanged(Event): .. versionchanged:: 2.0.0 Prior to this version the user needed to acknowledge settings changes. - This is no longer the case: hyper-h2 now automatically acknowledges + This is no longer the case: h2 now automatically acknowledges them. """ def __init__(self): @@ -414,10 +414,10 @@ class StreamReset(Event): The StreamReset event is fired in two situations. The first is when the remote party forcefully resets the stream. The second is when the remote party has made a protocol error which only affects a single stream. In this - case, Hyper-h2 will terminate the stream early and return this event. + case, h2 will terminate the stream early and return this event. .. versionchanged:: 2.0.0 - This event is now fired when Hyper-h2 automatically resets a stream. + This event is now fired when h2 automatically resets a stream. """ def __init__(self): #: The Stream ID of the stream that was reset. @@ -561,12 +561,12 @@ class AlternativeServiceAvailable(Event): This event always carries the origin to which the ALTSVC information applies. That origin is either supplied by the server directly, or inferred - by hyper-h2 from the ``:authority`` pseudo-header field that was sent by + by h2 from the ``:authority`` pseudo-header field that was sent by the user when initiating a given stream. This event also carries what RFC 7838 calls the "Alternative Service Field Value", which is formatted like a HTTP header field and contains the - relevant alternative service information. Hyper-h2 does not parse or in any + relevant alternative service information. h2 does not parse or in any way modify that information: the user is required to do that. This event can only be fired on the client end of a connection. @@ -576,13 +576,13 @@ class AlternativeServiceAvailable(Event): def __init__(self): #: The origin to which the alternative service field value applies. #: This field is either supplied by the server directly, or inferred by - #: hyper-h2 from the ``:authority`` pseudo-header field that was sent + #: h2 from the ``:authority`` pseudo-header field that was sent #: by the user when initiating the stream on which the frame was #: received. self.origin = None #: The ALTSVC field value. This contains information about the HTTP - #: alternative service being advertised by the server. Hyper-h2 does + #: alternative service being advertised by the server. h2 does #: not parse this field: it is left exactly as sent by the server. The #: structure of the data in this field is given by `RFC 7838 Section 3 #: `_. @@ -600,11 +600,11 @@ def __repr__(self): class UnknownFrameReceived(Event): """ The UnknownFrameReceived event is fired when the remote peer sends a frame - that hyper-h2 does not understand. This occurs primarily when the remote - peer is employing HTTP/2 extensions that hyper-h2 doesn't know anything + that h2 does not understand. This occurs primarily when the remote + peer is employing HTTP/2 extensions that h2 doesn't know anything about. - RFC 7540 requires that HTTP/2 implementations ignore these frames. hyper-h2 + RFC 7540 requires that HTTP/2 implementations ignore these frames. h2 does so. However, this event is fired to allow implementations to perform special processing on those frames if needed (e.g. 
if the implementation is capable of handling the frame itself). diff --git a/packages/h2/stream.py b/packages/h2/stream.py index 3c29b2431..817636f8e 100644 --- a/packages/h2/stream.py +++ b/packages/h2/stream.py @@ -528,7 +528,7 @@ def send_alt_svc(self, previous_state): # For this reason, our state machine implementation below allows for # PUSH_PROMISE frames both in the IDLE state (as in the diagram), but also # in the OPEN, HALF_CLOSED_LOCAL, and HALF_CLOSED_REMOTE states. -# Essentially, for hyper-h2, PUSH_PROMISE frames are effectively sent on +# Essentially, for h2, PUSH_PROMISE frames are effectively sent on # two streams. # # The _transitions dictionary contains a mapping of tuples of diff --git a/packages/h2/utilities.py b/packages/h2/utilities.py index eb07f575e..c7a620d8d 100644 --- a/packages/h2/utilities.py +++ b/packages/h2/utilities.py @@ -203,9 +203,12 @@ def validate_headers(headers, hdr_validation_flags): # checking remains somewhat expensive, and attempts should be made wherever # possible to reduce the time spent doing them. # - # For example, we avoid tuple upacking in loops because it represents a + # For example, we avoid tuple unpacking in loops because it represents a # fixed cost that we don't want to spend, instead indexing into the header # tuples. + headers = _reject_empty_header_names( + headers, hdr_validation_flags + ) headers = _reject_uppercase_header_fields( headers, hdr_validation_flags ) @@ -229,6 +232,19 @@ def validate_headers(headers, hdr_validation_flags): return headers +def _reject_empty_header_names(headers, hdr_validation_flags): + """ + Raises a ProtocolError if any header names are empty (length 0). + While hpack decodes such headers without errors, they are semantically + forbidden in HTTP, see RFC 7230, stating that they must be at least one + character long. 
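These h2 header validators are lazy generators that `validate_headers` chains into a pipeline, so the header list is walked only once. A standalone sketch of the pattern (names and error type illustrative, indexing `header[0]` rather than tuple-unpacking, per the performance note above):

```
def _reject_empty_header_names(headers):
    # RFC 7230: field names must be at least one character long.
    for header in headers:
        if len(header[0]) == 0:
            raise ValueError("Received header name with zero length.")
        yield header


def _reject_uppercase_header_fields(headers):
    for header in headers:
        if header[0] != header[0].lower():
            raise ValueError("Header name %r contains uppercase characters." % header[0])
        yield header


def validate_headers(headers):
    # Each stage wraps the previous one; nothing runs until consumed.
    headers = _reject_empty_header_names(headers)
    headers = _reject_uppercase_header_fields(headers)
    return list(headers)


print(validate_headers([(b"host", b"example.com")]))  # passes through unchanged
```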
+ """ + for header in headers: + if len(header[0]) == 0: + raise ProtocolError("Received header name with zero length.") + yield header + + def _reject_uppercase_header_fields(headers, hdr_validation_flags): """ Raises a ProtocolError if any uppercase character is found in a header @@ -272,7 +288,7 @@ def _reject_te(headers, hdr_validation_flags): if header[0] in (b'te', u'te'): if header[1].lower() not in (b'trailers', u'trailers'): raise ProtocolError( - "Invalid value for Transfer-Encoding header: %s" % + "Invalid value for TE header: %s" % header[1] ) diff --git a/packages/httpcore/__init__.py b/packages/httpcore/__init__.py index 2dde34e39..65abe9716 100644 --- a/packages/httpcore/__init__.py +++ b/packages/httpcore/__init__.py @@ -1,10 +1,25 @@ -from ._async.base import AsyncByteStream, AsyncHTTPTransport -from ._async.connection_pool import AsyncConnectionPool -from ._async.http_proxy import AsyncHTTPProxy -from ._bytestreams import AsyncIteratorByteStream, ByteStream, IteratorByteStream +from ._api import request, stream +from ._async import ( + AsyncConnectionInterface, + AsyncConnectionPool, + AsyncHTTP2Connection, + AsyncHTTP11Connection, + AsyncHTTPConnection, + AsyncHTTPProxy, + AsyncSOCKSProxy, +) +from ._backends.base import ( + SOCKET_OPTION, + AsyncNetworkBackend, + AsyncNetworkStream, + NetworkBackend, + NetworkStream, +) +from ._backends.mock import AsyncMockBackend, AsyncMockStream, MockBackend, MockStream +from ._backends.sync import SyncBackend from ._exceptions import ( - CloseError, ConnectError, + ConnectionNotAvailable, ConnectTimeout, LocalProtocolError, NetworkError, @@ -19,45 +34,106 @@ WriteError, WriteTimeout, ) -from ._sync.base import SyncByteStream, SyncHTTPTransport -from ._sync.connection_pool import SyncConnectionPool -from ._sync.http_proxy import SyncHTTPProxy +from ._models import URL, Origin, Request, Response +from ._ssl import default_ssl_context +from ._sync import ( + ConnectionInterface, + ConnectionPool, + HTTP2Connection, + HTTP11Connection, + HTTPConnection, + HTTPProxy, + SOCKSProxy, +) + +# The 'httpcore.AnyIOBackend' class is conditional on 'anyio' being installed. +try: + from ._backends.anyio import AnyIOBackend +except ImportError: # pragma: nocover + + class AnyIOBackend: # type: ignore + def __init__(self, *args, **kwargs): # type: ignore + msg = ( + "Attempted to use 'httpcore.AnyIOBackend' but 'anyio' is not installed." + ) + raise RuntimeError(msg) + + +# The 'httpcore.TrioBackend' class is conditional on 'trio' being installed. +try: + from ._backends.trio import TrioBackend +except ImportError: # pragma: nocover + + class TrioBackend: # type: ignore + def __init__(self, *args, **kwargs): # type: ignore + msg = "Attempted to use 'httpcore.TrioBackend' but 'trio' is not installed." 
+ raise RuntimeError(msg) + __all__ = [ - "AsyncByteStream", + # top-level requests + "request", + "stream", + # models + "Origin", + "URL", + "Request", + "Response", + # async + "AsyncHTTPConnection", "AsyncConnectionPool", "AsyncHTTPProxy", - "AsyncHTTPTransport", - "AsyncIteratorByteStream", - "ByteStream", - "CloseError", - "ConnectError", - "ConnectTimeout", - "IteratorByteStream", - "LocalProtocolError", - "NetworkError", - "PoolTimeout", - "ProtocolError", + "AsyncHTTP11Connection", + "AsyncHTTP2Connection", + "AsyncConnectionInterface", + "AsyncSOCKSProxy", + # sync + "HTTPConnection", + "ConnectionPool", + "HTTPProxy", + "HTTP11Connection", + "HTTP2Connection", + "ConnectionInterface", + "SOCKSProxy", + # network backends, implementations + "SyncBackend", + "AnyIOBackend", + "TrioBackend", + # network backends, mock implementations + "AsyncMockBackend", + "AsyncMockStream", + "MockBackend", + "MockStream", + # network backends, interface + "AsyncNetworkStream", + "AsyncNetworkBackend", + "NetworkStream", + "NetworkBackend", + # util + "default_ssl_context", + "SOCKET_OPTION", + # exceptions + "ConnectionNotAvailable", "ProxyError", - "ReadError", - "ReadTimeout", + "ProtocolError", + "LocalProtocolError", "RemoteProtocolError", - "SyncByteStream", - "SyncConnectionPool", - "SyncHTTPProxy", - "SyncHTTPTransport", - "TimeoutException", "UnsupportedProtocol", - "WriteError", + "TimeoutException", + "PoolTimeout", + "ConnectTimeout", + "ReadTimeout", "WriteTimeout", + "NetworkError", + "ConnectError", + "ReadError", + "WriteError", ] -__version__ = "0.13.6" -__locals = locals() +__version__ = "0.18.0" -for _name in __all__: - if not _name.startswith("__"): - # Save original source module, used by Sphinx. - __locals[_name].__source_module__ = __locals[_name].__module__ - # Override module for prettier repr(). - setattr(__locals[_name], "__module__", "httpcore") # noqa + +__locals = locals() +for __name in __all__: + if not __name.startswith("__"): + setattr(__locals[__name], "__module__", "httpcore") # noqa diff --git a/packages/httpcore/_api.py b/packages/httpcore/_api.py new file mode 100644 index 000000000..854235f5f --- /dev/null +++ b/packages/httpcore/_api.py @@ -0,0 +1,92 @@ +from contextlib import contextmanager +from typing import Iterator, Optional, Union + +from ._models import URL, Extensions, HeaderTypes, Response +from ._sync.connection_pool import ConnectionPool + + +def request( + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterator[bytes], None] = None, + extensions: Optional[Extensions] = None, +) -> Response: + """ + Sends an HTTP request, returning the response. + + ``` + response = httpcore.request("GET", "https://www.example.com/") + ``` + + Arguments: + method: The HTTP method for the request. Typically one of `"GET"`, + `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`. + url: The URL of the HTTP request. Either as an instance of `httpcore.URL`, + or as str/bytes. + headers: The HTTP request headers. Either as a dictionary of str/bytes, + or as a list of two-tuples of str/bytes. + content: The content of the request body. Either as bytes, + or as a bytes iterator. + extensions: A dictionary of optional extra information included on the request. + Possible keys include `"timeout"`. + + Returns: + An instance of `httpcore.Response`. 
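A usage sketch for the new top-level `request()` API documented above (network access assumed):

```
import httpcore

response = httpcore.request("GET", "https://www.example.com/")
print(response.status)        # e.g. 200
print(response.headers)       # list of (name, value) byte tuples
print(response.content[:15])  # body bytes, fully read by request()
```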
+ """ + with ConnectionPool() as pool: + return pool.request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + + +@contextmanager +def stream( + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterator[bytes], None] = None, + extensions: Optional[Extensions] = None, +) -> Iterator[Response]: + """ + Sends an HTTP request, returning the response within a content manager. + + ``` + with httpcore.stream("GET", "https://www.example.com/") as response: + ... + ``` + + When using the `stream()` function, the body of the response will not be + automatically read. If you want to access the response body you should + either use `content = response.read()`, or `for chunk in response.iter_content()`. + + Arguments: + method: The HTTP method for the request. Typically one of `"GET"`, + `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`. + url: The URL of the HTTP request. Either as an instance of `httpcore.URL`, + or as str/bytes. + headers: The HTTP request headers. Either as a dictionary of str/bytes, + or as a list of two-tuples of str/bytes. + content: The content of the request body. Either as bytes, + or as a bytes iterator. + extensions: A dictionary of optional extra information included on the request. + Possible keys include `"timeout"`. + + Returns: + An instance of `httpcore.Response`. + """ + with ConnectionPool() as pool: + with pool.stream( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) as response: + yield response diff --git a/packages/httpcore/_async/__init__.py b/packages/httpcore/_async/__init__.py index e69de29bb..88dc7f01e 100644 --- a/packages/httpcore/_async/__init__.py +++ b/packages/httpcore/_async/__init__.py @@ -0,0 +1,39 @@ +from .connection import AsyncHTTPConnection +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .http_proxy import AsyncHTTPProxy +from .interfaces import AsyncConnectionInterface + +try: + from .http2 import AsyncHTTP2Connection +except ImportError: # pragma: nocover + + class AsyncHTTP2Connection: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use http2 support, but the `h2` package is not " + "installed. Use 'pip install httpcore[http2]'." + ) + + +try: + from .socks_proxy import AsyncSOCKSProxy +except ImportError: # pragma: nocover + + class AsyncSOCKSProxy: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use SOCKS support, but the `socksio` package is not " + "installed. Use 'pip install httpcore[socks]'." + ) + + +__all__ = [ + "AsyncHTTPConnection", + "AsyncConnectionPool", + "AsyncHTTPProxy", + "AsyncHTTP11Connection", + "AsyncHTTP2Connection", + "AsyncConnectionInterface", + "AsyncSOCKSProxy", +] diff --git a/packages/httpcore/_async/base.py b/packages/httpcore/_async/base.py deleted file mode 100644 index 2b3961c29..000000000 --- a/packages/httpcore/_async/base.py +++ /dev/null @@ -1,122 +0,0 @@ -import enum -from types import TracebackType -from typing import AsyncIterator, Tuple, Type - -from .._types import URL, Headers, T - - -class NewConnectionRequired(Exception): - pass - - -class ConnectionState(enum.IntEnum): - """ - PENDING READY - | | ^ - v V | - ACTIVE | - | | | - | V | - V IDLE-+ - FULL | - | | - V V - CLOSED - """ - - PENDING = 0 # Connection not yet acquired. 
- READY = 1 # Re-acquired from pool, about to send a request. - ACTIVE = 2 # Active requests. - FULL = 3 # Active requests, no more stream IDs available. - IDLE = 4 # No active requests. - CLOSED = 5 # Connection closed. - - -class AsyncByteStream: - """ - The base interface for request and response bodies. - - Concrete implementations should subclass this class, and implement - the :meth:`__aiter__` method, and optionally the :meth:`aclose` method. - """ - - async def __aiter__(self) -> AsyncIterator[bytes]: - """ - Yield bytes representing the request or response body. - """ - yield b"" # pragma: nocover - - async def aclose(self) -> None: - """ - Must be called by the client to indicate that the stream has been closed. - """ - pass # pragma: nocover - - async def aread(self) -> bytes: - try: - return b"".join([part async for part in self]) - finally: - await self.aclose() - - -class AsyncHTTPTransport: - """ - The base interface for sending HTTP requests. - - Concrete implementations should subclass this class, and implement - the :meth:`handle_async_request` method, and optionally the :meth:`aclose` method. - """ - - async def handle_async_request( - self, - method: bytes, - url: URL, - headers: Headers, - stream: AsyncByteStream, - extensions: dict, - ) -> Tuple[int, Headers, AsyncByteStream, dict]: - """ - The interface for sending a single HTTP request, and returning a response. - - Parameters - ---------- - method: - The HTTP method, such as ``b'GET'``. - url: - The URL as a 4-tuple of (scheme, host, port, path). - headers: - Any HTTP headers to send with the request. - stream: - The body of the HTTP request. - extensions: - A dictionary of optional extensions. - - Returns - ------- - status_code: - The HTTP status code, such as ``200``. - headers: - Any HTTP headers included on the response. - stream: - The body of the HTTP response. - extensions: - A dictionary of optional extensions. - """ - raise NotImplementedError() # pragma: nocover - - async def aclose(self) -> None: - """ - Close the implementation, which should close any outstanding response streams, - and any keep alive connections. 
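The tuple-based `AsyncHTTPTransport` interface being deleted here was replaced by the `Request`/`Response` models. A sketch of the equivalent under the new interface; `EchoTransport` is an illustrative class, not part of httpcore:

```
import httpcore


class EchoTransport(httpcore.AsyncConnectionInterface):
    """Toy transport that returns the request target as the response body."""

    async def handle_async_request(self, request: httpcore.Request) -> httpcore.Response:
        # One object in, one object out -- no more
        # (status, headers, stream, extensions) tuples.
        return httpcore.Response(200, content=b"echo: " + request.url.target)
```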
- """ - - async def __aenter__(self: T) -> T: - return self - - async def __aexit__( - self, - exc_type: Type[BaseException] = None, - exc_value: BaseException = None, - traceback: TracebackType = None, - ) -> None: - await self.aclose() diff --git a/packages/httpcore/_async/connection.py b/packages/httpcore/_async/connection.py index 2add4d857..45ee22a63 100644 --- a/packages/httpcore/_async/connection.py +++ b/packages/httpcore/_async/connection.py @@ -1,158 +1,111 @@ -from ssl import SSLContext -from typing import List, Optional, Tuple, cast - -from .._backends.auto import AsyncBackend, AsyncLock, AsyncSocketStream, AutoBackend -from .._exceptions import ConnectError, ConnectTimeout -from .._types import URL, Headers, Origin, TimeoutDict -from .._utils import exponential_backoff, get_logger, url_to_origin -from .base import AsyncByteStream, AsyncHTTPTransport, NewConnectionRequired -from .http import AsyncBaseHTTPConnection +import itertools +import logging +import ssl +from types import TracebackType +from typing import Iterable, Iterator, Optional, Type + +from .._backends.auto import AutoBackend +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream +from .._exceptions import ConnectError, ConnectionNotAvailable, ConnectTimeout +from .._models import Origin, Request, Response +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace from .http11 import AsyncHTTP11Connection - -logger = get_logger(__name__) +from .interfaces import AsyncConnectionInterface RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. -class AsyncHTTPConnection(AsyncHTTPTransport): +logger = logging.getLogger("httpcore.connection") + + +def exponential_backoff(factor: float) -> Iterator[float]: + """ + Generate a geometric sequence that has a ratio of 2 and starts with 0. 
+ + For example: + - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` + - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` + """ + yield 0 + for n in itertools.count(): + yield factor * 2**n + + +class AsyncHTTPConnection(AsyncConnectionInterface): def __init__( self, origin: Origin, + ssl_context: Optional[ssl.SSLContext] = None, + keepalive_expiry: Optional[float] = None, http1: bool = True, http2: bool = False, - keepalive_expiry: float = None, - uds: str = None, - ssl_context: SSLContext = None, - socket: AsyncSocketStream = None, - local_address: str = None, retries: int = 0, - backend: AsyncBackend = None, - ): - self.origin = origin - self._http1_enabled = http1 - self._http2_enabled = http2 + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + self._origin = origin + self._ssl_context = ssl_context self._keepalive_expiry = keepalive_expiry - self._uds = uds - self._ssl_context = SSLContext() if ssl_context is None else ssl_context - self.socket = socket - self._local_address = local_address + self._http1 = http1 + self._http2 = http2 self._retries = retries + self._local_address = local_address + self._uds = uds - alpn_protocols: List[str] = [] - if http1: - alpn_protocols.append("http/1.1") - if http2: - alpn_protocols.append("h2") - - self._ssl_context.set_alpn_protocols(alpn_protocols) - - self.connection: Optional[AsyncBaseHTTPConnection] = None - self._is_http11 = False - self._is_http2 = False - self._connect_failed = False - self._expires_at: Optional[float] = None - self._backend = AutoBackend() if backend is None else backend - - def __repr__(self) -> str: - return f"" - - def info(self) -> str: - if self.connection is None: - return "Connection failed" if self._connect_failed else "Connecting" - return self.connection.info() - - def should_close(self) -> bool: - """ - Return `True` if the connection is in a state where it should be closed. - This occurs when any of the following occur: - - * There are no active requests on an HTTP/1.1 connection, and the underlying - socket is readable. The only valid state the socket can be readable in - if this occurs is when the b"" EOF marker is about to be returned, - indicating a server disconnect. - * There are no active requests being made and the keepalive timeout has passed. - """ - if self.connection is None: - return False - return self.connection.should_close() - - def is_idle(self) -> bool: - """ - Return `True` if the connection is currently idle. 
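For the `exponential_backoff` helper added above, the concrete retry schedule under the module's `RETRIES_BACKOFF_FACTOR = 0.5` works out as follows (the helper is restated here so the snippet is self-contained):

```
import itertools


def exponential_backoff(factor):
    # Same shape as the helper above: 0 first, then factor * 2**n.
    yield 0
    for n in itertools.count():
        yield factor * 2 ** n


delays = exponential_backoff(0.5)
print([next(delays) for _ in range(6)])  # [0, 0.5, 1.0, 2.0, 4.0, 8.0]
```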
- """ - if self.connection is None: - return False - return self.connection.is_idle() + self._network_backend: AsyncNetworkBackend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._connection: Optional[AsyncConnectionInterface] = None + self._connect_failed: bool = False + self._request_lock = AsyncLock() + self._socket_options = socket_options + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection to {self._origin}" + ) - def is_closed(self) -> bool: - if self.connection is None: - return self._connect_failed - return self.connection.is_closed() + async with self._request_lock: + if self._connection is None: + try: + stream = await self._connect(request) - def is_available(self) -> bool: - """ - Return `True` if the connection is currently able to accept an outgoing request. - This occurs when any of the following occur: - - * The connection has not yet been opened, and HTTP/2 support is enabled. - We don't *know* at this point if we'll end up on an HTTP/2 connection or - not, but we *might* do, so we indicate availability. - * The connection has been opened, and is currently idle. - * The connection is open, and is an HTTP/2 connection. The connection must - also not currently be exceeding the maximum number of allowable concurrent - streams and must not have exhausted the maximum total number of stream IDs. - """ - if self.connection is None: - return self._http2_enabled and not self.is_closed - return self.connection.is_available() - - @property - def request_lock(self) -> AsyncLock: - # We do this lazily, to make sure backend autodetection always - # runs within an async context. 
- if not hasattr(self, "_request_lock"): - self._request_lock = self._backend.create_lock() - return self._request_lock - - async def handle_async_request( - self, - method: bytes, - url: URL, - headers: Headers, - stream: AsyncByteStream, - extensions: dict, - ) -> Tuple[int, Headers, AsyncByteStream, dict]: - assert url_to_origin(url) == self.origin - timeout = cast(TimeoutDict, extensions.get("timeout", {})) - - async with self.request_lock: - if self.connection is None: - if self._connect_failed: - raise NewConnectionRequired() - if not self.socket: - logger.trace( - "open_socket origin=%r timeout=%r", self.origin, timeout + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" ) - self.socket = await self._open_socket(timeout) - self._create_connection(self.socket) - elif not self.connection.is_available(): - raise NewConnectionRequired() - - assert self.connection is not None - logger.trace( - "connection.handle_async_request method=%r url=%r headers=%r", - method, - url, - headers, - ) - return await self.connection.handle_async_request( - method, url, headers, stream, extensions - ) + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except Exception as exc: + self._connect_failed = True + raise exc + elif not self._connection.is_available(): + raise ConnectionNotAvailable() - async def _open_socket(self, timeout: TimeoutDict = None) -> AsyncSocketStream: - scheme, hostname, port = self.origin - timeout = {} if timeout is None else timeout - ssl_context = self._ssl_context if scheme == b"https" else None + return await self._connection.handle_async_request(request) + + async def _connect(self, request: Request) -> AsyncNetworkStream: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) retries_left = self._retries delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) @@ -160,61 +113,110 @@ async def _open_socket(self, timeout: TimeoutDict = None) -> AsyncSocketStream: while True: try: if self._uds is None: - return await self._backend.open_tcp_stream( - hostname, - port, - ssl_context, - timeout, - local_address=self._local_address, - ) + kwargs = { + "host": self._origin.host.decode("ascii"), + "port": self._origin.port, + "local_address": self._local_address, + "timeout": timeout, + "socket_options": self._socket_options, + } + async with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = await self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream else: - return await self._backend.open_uds_stream( - self._uds, hostname, ssl_context, timeout + kwargs = { + "path": self._uds, + "timeout": timeout, + "socket_options": self._socket_options, + } + async with Trace( + "connect_unix_socket", logger, request, kwargs + ) as trace: + stream = await self._network_backend.connect_unix_socket( + **kwargs + ) + trace.return_value = stream + + if self._origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else 
["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + return stream except (ConnectError, ConnectTimeout): if retries_left <= 0: - self._connect_failed = True raise retries_left -= 1 delay = next(delays) - await self._backend.sleep(delay) - except Exception: # noqa: PIE786 - self._connect_failed = True - raise - - def _create_connection(self, socket: AsyncSocketStream) -> None: - http_version = socket.get_http_version() - logger.trace( - "create_connection socket=%r http_version=%r", socket, http_version - ) - if http_version == "HTTP/2" or ( - self._http2_enabled and not self._http1_enabled - ): - from .http2 import AsyncHTTP2Connection - - self._is_http2 = True - self.connection = AsyncHTTP2Connection( - socket=socket, - keepalive_expiry=self._keepalive_expiry, - backend=self._backend, - ) - else: - self._is_http11 = True - self.connection = AsyncHTTP11Connection( - socket=socket, keepalive_expiry=self._keepalive_expiry - ) + async with Trace("retry", logger, request, kwargs) as trace: + await self._network_backend.sleep(delay) - async def start_tls( - self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None - ) -> None: - if self.connection is not None: - logger.trace("start_tls hostname=%r timeout=%r", hostname, timeout) - self.socket = await self.connection.start_tls( - hostname, ssl_context, timeout - ) - logger.trace("start_tls complete hostname=%r timeout=%r", hostname, timeout) + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin async def aclose(self) -> None: - async with self.request_lock: - if self.connection is not None: - await self.connection.aclose() + if self._connection is not None: + async with Trace("close", logger, None, {}): + await self._connection.aclose() + + def is_available(self) -> bool: + if self._connection is None: + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
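As the comment above notes, a connection instance can be driven directly rather than through a pool. A sketch of that direct use (network access assumed; requests for any other origin raise `RuntimeError` via `can_handle_request`):

```
import asyncio

import httpcore


async def main() -> None:
    origin = httpcore.Origin(b"https", b"example.com", 443)
    async with httpcore.AsyncHTTPConnection(origin=origin) as connection:
        response = await connection.request("GET", "https://example.com/")
        print(connection.info(), response.status)


asyncio.run(main())
```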
+ + async def __aenter__(self) -> "AsyncHTTPConnection": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + await self.aclose() diff --git a/packages/httpcore/_async/connection_pool.py b/packages/httpcore/_async/connection_pool.py index f86c2277c..ddc0510e6 100644 --- a/packages/httpcore/_async/connection_pool.py +++ b/packages/httpcore/_async/connection_pool.py @@ -1,365 +1,356 @@ -import warnings -from ssl import SSLContext -from typing import ( - AsyncIterator, - Callable, - Dict, - List, - Optional, - Set, - Tuple, - Union, - cast, -) - -from .._backends.auto import AsyncBackend, AsyncLock, AsyncSemaphore -from .._backends.base import lookup_async_backend -from .._exceptions import LocalProtocolError, PoolTimeout, UnsupportedProtocol -from .._threadlock import ThreadLock -from .._types import URL, Headers, Origin, TimeoutDict -from .._utils import get_logger, origin_to_url_string, url_to_origin -from .base import AsyncByteStream, AsyncHTTPTransport, NewConnectionRequired +import ssl +import sys +from types import TracebackType +from typing import AsyncIterable, AsyncIterator, Iterable, List, Optional, Type + +from .._backends.auto import AutoBackend +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend +from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol +from .._models import Origin, Request, Response +from .._synchronization import AsyncEvent, AsyncLock, AsyncShieldCancellation from .connection import AsyncHTTPConnection +from .interfaces import AsyncConnectionInterface, AsyncRequestInterface -logger = get_logger(__name__) +class RequestStatus: + def __init__(self, request: Request): + self.request = request + self.connection: Optional[AsyncConnectionInterface] = None + self._connection_acquired = AsyncEvent() -class NullSemaphore(AsyncSemaphore): - def __init__(self) -> None: - pass - - async def acquire(self, timeout: float = None) -> None: - return - - async def release(self) -> None: - return - - -class ResponseByteStream(AsyncByteStream): - def __init__( - self, - stream: AsyncByteStream, - connection: AsyncHTTPConnection, - callback: Callable, - ) -> None: - """ - A wrapper around the response stream that we return from - `.handle_async_request()`. - - Ensures that when `stream.aclose()` is called, the connection pool - is notified via a callback. - """ - self.stream = stream + def set_connection(self, connection: AsyncConnectionInterface) -> None: + assert self.connection is None self.connection = connection - self.callback = callback + self._connection_acquired.set() - async def __aiter__(self) -> AsyncIterator[bytes]: - async for chunk in self.stream: - yield chunk + def unset_connection(self) -> None: + assert self.connection is not None + self.connection = None + self._connection_acquired = AsyncEvent() - async def aclose(self) -> None: - try: - # Call the underlying stream close callback. - # This will be a call to `AsyncHTTP11Connection._response_closed()` - # or `AsyncHTTP2Stream._response_closed()`. - await self.stream.aclose() - finally: - # Call the connection pool close callback. - # This will be a call to `AsyncConnectionPool._response_closed()`. 
- await self.callback(self.connection) + async def wait_for_connection( + self, timeout: Optional[float] = None + ) -> AsyncConnectionInterface: + if self.connection is None: + await self._connection_acquired.wait(timeout=timeout) + assert self.connection is not None + return self.connection -class AsyncConnectionPool(AsyncHTTPTransport): +class AsyncConnectionPool(AsyncRequestInterface): """ A connection pool for making HTTP requests. - - Parameters - ---------- - ssl_context: - An SSL context to use for verifying connections. - max_connections: - The maximum number of concurrent connections to allow. - max_keepalive_connections: - The maximum number of connections to allow before closing keep-alive - connections. - keepalive_expiry: - The maximum time to allow before closing a keep-alive connection. - http1: - Enable/Disable HTTP/1.1 support. Defaults to True. - http2: - Enable/Disable HTTP/2 support. Defaults to False. - uds: - Path to a Unix Domain Socket to use instead of TCP sockets. - local_address: - Local address to connect from. Can also be used to connect using a particular - address family. Using ``local_address="0.0.0.0"`` will connect using an - ``AF_INET`` address (IPv4), while using ``local_address="::"`` will connect - using an ``AF_INET6`` address (IPv6). - retries: - The maximum number of retries when trying to establish a connection. - backend: - A name indicating which concurrency backend to use. """ def __init__( self, - ssl_context: SSLContext = None, - max_connections: int = None, - max_keepalive_connections: int = None, - keepalive_expiry: float = None, + ssl_context: Optional[ssl.SSLContext] = None, + max_connections: Optional[int] = 10, + max_keepalive_connections: Optional[int] = None, + keepalive_expiry: Optional[float] = None, http1: bool = True, http2: bool = False, - uds: str = None, - local_address: str = None, retries: int = 0, - max_keepalive: int = None, - backend: Union[AsyncBackend, str] = "auto", - ): - if max_keepalive is not None: - warnings.warn( - "'max_keepalive' is deprecated. Use 'max_keepalive_connections'.", - DeprecationWarning, - ) - max_keepalive_connections = max_keepalive + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish a + connection. + local_address: Local address to connect from. Can also be used to connect + using a particular address family. 
Using `local_address="0.0.0.0"` + will connect using an `AF_INET` address (IPv4), while using + `local_address="::"` will connect using an `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. + socket_options: Socket options that have to be included + in the TCP socket when the connection was established. + """ + self._ssl_context = ssl_context - if isinstance(backend, str): - backend = lookup_async_backend(backend) + self._max_connections = ( + sys.maxsize if max_connections is None else max_connections + ) + self._max_keepalive_connections = ( + sys.maxsize + if max_keepalive_connections is None + else max_keepalive_connections + ) + self._max_keepalive_connections = min( + self._max_connections, self._max_keepalive_connections + ) - self._ssl_context = SSLContext() if ssl_context is None else ssl_context - self._max_connections = max_connections - self._max_keepalive_connections = max_keepalive_connections self._keepalive_expiry = keepalive_expiry self._http1 = http1 self._http2 = http2 - self._uds = uds - self._local_address = local_address self._retries = retries - self._connections: Dict[Origin, Set[AsyncHTTPConnection]] = {} - self._thread_lock = ThreadLock() - self._backend = backend - self._next_keepalive_check = 0.0 - - if not (http1 or http2): - raise ValueError("Either http1 or http2 must be True.") - - if http2: - try: - import h2 # noqa: F401 - except ImportError: - raise ImportError( - "Attempted to use http2=True, but the 'h2' " - "package is not installed. Use 'pip install httpcore[http2]'." - ) - - @property - def _connection_semaphore(self) -> AsyncSemaphore: - # We do this lazily, to make sure backend autodetection always - # runs within an async context. 
- if not hasattr(self, "_internal_semaphore"): - if self._max_connections is not None: - self._internal_semaphore = self._backend.create_semaphore( - self._max_connections, exc_class=PoolTimeout - ) - else: - self._internal_semaphore = NullSemaphore() - - return self._internal_semaphore + self._local_address = local_address + self._uds = uds - @property - def _connection_acquiry_lock(self) -> AsyncLock: - if not hasattr(self, "_internal_connection_acquiry_lock"): - self._internal_connection_acquiry_lock = self._backend.create_lock() - return self._internal_connection_acquiry_lock + self._pool: List[AsyncConnectionInterface] = [] + self._requests: List[RequestStatus] = [] + self._pool_lock = AsyncLock() + self._network_backend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._socket_options = socket_options - def _create_connection( - self, - origin: Tuple[bytes, bytes, int], - ) -> AsyncHTTPConnection: + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: return AsyncHTTPConnection( origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, http1=self._http1, http2=self._http2, - keepalive_expiry=self._keepalive_expiry, - uds=self._uds, - ssl_context=self._ssl_context, - local_address=self._local_address, retries=self._retries, - backend=self._backend, + local_address=self._local_address, + uds=self._uds, + network_backend=self._network_backend, + socket_options=self._socket_options, ) - async def handle_async_request( - self, - method: bytes, - url: URL, - headers: Headers, - stream: AsyncByteStream, - extensions: dict, - ) -> Tuple[int, Headers, AsyncByteStream, dict]: - if url[0] not in (b"http", b"https"): - scheme = url[0].decode("latin-1") - host = url[1].decode("latin-1") - if scheme == "": - raise UnsupportedProtocol( - f"The request to '://{host}/' is missing either an 'http://' \ - or 'https://' protocol." - ) - else: - raise UnsupportedProtocol( - f"The request to '{scheme}://{host}' has \ - an unsupported protocol {scheme!r}" - ) - - if not url[1]: - raise LocalProtocolError("Missing hostname in URL.") - - origin = url_to_origin(url) - timeout = cast(TimeoutDict, extensions.get("timeout", {})) - - await self._keepalive_sweep() - - connection: Optional[AsyncHTTPConnection] = None - while connection is None: - async with self._connection_acquiry_lock: - # We get-or-create a connection as an atomic operation, to ensure - # that HTTP/2 requests issued in close concurrency will end up - # on the same connection. - logger.trace("get_connection_from_pool=%r", origin) - connection = await self._get_connection_from_pool(origin) - - if connection is None: - connection = self._create_connection(origin=origin) - logger.trace("created connection=%r", connection) - await self._add_to_pool(connection, timeout=timeout) - else: - logger.trace("reuse connection=%r", connection) + @property + def connections(self) -> List[AsyncConnectionInterface]: + """ + Return a list of the connections currently in the pool. + + For example: + + ```python + >>> pool.connections + [ + , + , + , + ] + ``` + """ + return list(self._pool) - try: - response = await connection.handle_async_request( - method, url, headers=headers, stream=stream, extensions=extensions - ) - except NewConnectionRequired: - connection = None - except BaseException: # noqa: PIE786 - # See https://github.com/encode/httpcore/pull/305 for motivation - # behind catching 'BaseException' rather than 'Exception' here. 
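
As a usage sketch of the pool behaviour documented above (the constructor parameters and the `connections` property), again assuming the top-level `httpcore` re-exports; note that `http2=True` would additionally require the `h2` package:

```python
import asyncio

import httpcore


async def main() -> None:
    async with httpcore.AsyncConnectionPool(
        max_connections=10,
        max_keepalive_connections=5,
        keepalive_expiry=30.0,
    ) as pool:
        response = await pool.request("GET", "https://www.example.com/")
        print(response.status)
        # Inspect pool state via the `connections` property shown above.
        for connection in pool.connections:
            print(connection.info())


asyncio.run(main())
```
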
- logger.trace("remove from pool connection=%r", connection) - await self._remove_from_pool(connection) - raise - - status_code, headers, stream, extensions = response - wrapped_stream = ResponseByteStream( - stream, connection=connection, callback=self._response_closed - ) - return status_code, headers, wrapped_stream, extensions - - async def _get_connection_from_pool( - self, origin: Origin - ) -> Optional[AsyncHTTPConnection]: - # Determine expired keep alive connections on this origin. - reuse_connection = None - connections_to_close = set() - - for connection in self._connections_for_origin(origin): - if connection.should_close(): - connections_to_close.add(connection) - await self._remove_from_pool(connection) - elif connection.is_available(): - reuse_connection = connection - - # Close any dropped connections. - for connection in connections_to_close: - await connection.aclose() - - return reuse_connection - - async def _response_closed(self, connection: AsyncHTTPConnection) -> None: - remove_from_pool = False - close_connection = False - - if connection.is_closed(): - remove_from_pool = True - elif connection.is_idle(): - num_connections = len(self._get_all_connections()) - if ( - self._max_keepalive_connections is not None - and num_connections > self._max_keepalive_connections - ): - remove_from_pool = True - close_connection = True - - if remove_from_pool: - await self._remove_from_pool(connection) - - if close_connection: - await connection.aclose() - - async def _keepalive_sweep(self) -> None: + async def _attempt_to_acquire_connection(self, status: RequestStatus) -> bool: + """ + Attempt to provide a connection that can handle the given origin. + """ + origin = status.request.url.origin + + # If there are queued requests in front of us, then don't acquire a + # connection. We handle requests strictly in order. + waiting = [s for s in self._requests if s.connection is None] + if waiting and waiting[0] is not status: + return False + + # Reuse an existing connection if one is currently available. + for idx, connection in enumerate(self._pool): + if connection.can_handle_request(origin) and connection.is_available(): + self._pool.pop(idx) + self._pool.insert(0, connection) + status.set_connection(connection) + return True + + # If the pool is currently full, attempt to close one idle connection. + if len(self._pool) >= self._max_connections: + for idx, connection in reversed(list(enumerate(self._pool))): + if connection.is_idle(): + await connection.aclose() + self._pool.pop(idx) + break + + # If the pool is still full, then we cannot acquire a connection. + if len(self._pool) >= self._max_connections: + return False + + # Otherwise create a new connection. + connection = self.create_connection(origin) + self._pool.insert(0, connection) + status.set_connection(connection) + return True + + async def _close_expired_connections(self) -> None: """ - Remove any IDLE connections that have expired past their keep-alive time. + Clean up the connection pool by closing off any connections that have expired. """ - if self._keepalive_expiry is None: - return + # Close any connections that have expired their keep-alive time. + for idx, connection in reversed(list(enumerate(self._pool))): + if connection.has_expired(): + await connection.aclose() + self._pool.pop(idx) + + # If the pool size exceeds the maximum number of allowed keep-alive connections, + # then close off idle connections as required. 
+ pool_size = len(self._pool) + for idx, connection in reversed(list(enumerate(self._pool))): + if connection.is_idle() and pool_size > self._max_keepalive_connections: + await connection.aclose() + self._pool.pop(idx) + pool_size -= 1 + + async def handle_async_request(self, request: Request) -> Response: + """ + Send an HTTP request, and return an HTTP response. - now = await self._backend.time() - if now < self._next_keepalive_check: - return + This is the core implementation that is called into by `.request()` or `.stream()`. + """ + scheme = request.url.scheme.decode() + if scheme == "": + raise UnsupportedProtocol( + "Request URL is missing an 'http://' or 'https://' protocol." + ) + if scheme not in ("http", "https", "ws", "wss"): + raise UnsupportedProtocol( + f"Request URL has an unsupported protocol '{scheme}://'." + ) - self._next_keepalive_check = now + min(1.0, self._keepalive_expiry) - connections_to_close = set() + status = RequestStatus(request) - for connection in self._get_all_connections(): - if connection.should_close(): - connections_to_close.add(connection) - await self._remove_from_pool(connection) + async with self._pool_lock: + self._requests.append(status) + await self._close_expired_connections() + await self._attempt_to_acquire_connection(status) - for connection in connections_to_close: - await connection.aclose() + while True: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("pool", None) + try: + connection = await status.wait_for_connection(timeout=timeout) + except BaseException as exc: + # If we timeout here, or if the task is cancelled, then make + # sure to remove the request from the queue before bubbling + # up the exception. + async with self._pool_lock: + # Ensure only remove when task exists. + if status in self._requests: + self._requests.remove(status) + raise exc - async def _add_to_pool( - self, connection: AsyncHTTPConnection, timeout: TimeoutDict - ) -> None: - logger.trace("adding connection to pool=%r", connection) - await self._connection_semaphore.acquire(timeout=timeout.get("pool", None)) - async with self._thread_lock: - self._connections.setdefault(connection.origin, set()) - self._connections[connection.origin].add(connection) - - async def _remove_from_pool(self, connection: AsyncHTTPConnection) -> None: - logger.trace("removing connection from pool=%r", connection) - async with self._thread_lock: - if connection in self._connections.get(connection.origin, set()): - await self._connection_semaphore.release() - self._connections[connection.origin].remove(connection) - if not self._connections[connection.origin]: - del self._connections[connection.origin] - - def _connections_for_origin(self, origin: Origin) -> Set[AsyncHTTPConnection]: - return set(self._connections.get(origin, set())) - - def _get_all_connections(self) -> Set[AsyncHTTPConnection]: - connections: Set[AsyncHTTPConnection] = set() - for connection_set in self._connections.values(): - connections |= connection_set - return connections + try: + response = await connection.handle_async_request(request) + except ConnectionNotAvailable: + # The ConnectionNotAvailable exception is a special case, that + # indicates we need to retry the request on a new connection. + # + # The most common case where this can occur is when multiple + # requests are queued waiting for a single connection, which + # might end up as an HTTP/2 connection, but which actually ends + # up as HTTP/1.1. 
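
The "pool" timeout consumed by `wait_for_connection()` above is supplied through the request's "timeout" extension. A hedged sketch of how a caller bounds the time a request may spend queued for a connection (the specific timeout values are illustrative):

```python
import asyncio

import httpcore


async def main() -> None:
    async with httpcore.AsyncConnectionPool(max_connections=1) as pool:
        # "pool" bounds how long the request may wait in the queue for a
        # connection; "connect" and "read" bound the network operations.
        extensions = {"timeout": {"pool": 5.0, "connect": 5.0, "read": 5.0}}
        response = await pool.request(
            "GET", "https://www.example.com/", extensions=extensions
        )
        print(response.status)


asyncio.run(main())
```
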
+                async with self._pool_lock:
+                    # Maintain our position in the request queue, but reset the
+                    # status so that the request becomes queued again.
+                    status.unset_connection()
+                    await self._attempt_to_acquire_connection(status)
+            except BaseException as exc:
+                with AsyncShieldCancellation():
+                    await self.response_closed(status)
+                raise exc
+            else:
+                break
+
+        # When we return the response, we wrap the stream in a special class
+        # that handles notifying the connection pool once the response
+        # has been released.
+        assert isinstance(response.stream, AsyncIterable)
+        return Response(
+            status=response.status,
+            headers=response.headers,
+            content=ConnectionPoolByteStream(response.stream, self, status),
+            extensions=response.extensions,
+        )

-    async def aclose(self) -> None:
-        connections = self._get_all_connections()
-        for connection in connections:
-            await self._remove_from_pool(connection)
+    async def response_closed(self, status: RequestStatus) -> None:
+        """
+        This method acts as a callback once the request/response cycle is complete.

-        # Close all connections
-        for connection in connections:
-            await connection.aclose()
+        It is called into from the `ConnectionPoolByteStream.aclose()` method.
+        """
+        assert status.connection is not None
+        connection = status.connection
+
+        async with self._pool_lock:
+            # Update the state of the connection pool.
+            if status in self._requests:
+                self._requests.remove(status)
+
+            if connection.is_closed() and connection in self._pool:
+                self._pool.remove(connection)
+
+            # Since we've had a response closed, it's possible we'll now be able
+            # to service one or more requests that are currently pending.
+            for status in self._requests:
+                if status.connection is None:
+                    acquired = await self._attempt_to_acquire_connection(status)
+                    # If we could not acquire a connection for a queued request
+                    # then we don't need to check any more requests that are
+                    # queued later behind it.
+                    if not acquired:
+                        break
+
+            # Housekeeping.
+            await self._close_expired_connections()

-    async def get_connection_info(self) -> Dict[str, List[str]]:
+    async def aclose(self) -> None:
        """
-        Returns a dict of origin URLs to a list of summary strings for each connection.
+        Close any connections in the pool.
        """
-        await self._keepalive_sweep()
+        async with self._pool_lock:
+            for connection in self._pool:
+                await connection.aclose()
+            self._pool = []
+            self._requests = []

-        stats = {}
-        for origin, connections in self._connections.items():
-            stats[origin_to_url_string(origin)] = sorted(
-                [connection.info() for connection in connections]
-            )
-        return stats
+    async def __aenter__(self) -> "AsyncConnectionPool":
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: Optional[Type[BaseException]] = None,
+        exc_value: Optional[BaseException] = None,
+        traceback: Optional[TracebackType] = None,
+    ) -> None:
+        await self.aclose()
+
+
+class ConnectionPoolByteStream:
+    """
+    A wrapper around the response byte stream, that additionally handles
+    notifying the connection pool when the response has been closed.
+ """ + + def __init__( + self, + stream: AsyncIterable[bytes], + pool: AsyncConnectionPool, + status: RequestStatus, + ) -> None: + self._stream = stream + self._pool = pool + self._status = status + + async def __aiter__(self) -> AsyncIterator[bytes]: + async for part in self._stream: + yield part + + async def aclose(self) -> None: + try: + if hasattr(self._stream, "aclose"): + await self._stream.aclose() + finally: + with AsyncShieldCancellation(): + await self._pool.response_closed(self._status) diff --git a/packages/httpcore/_async/http.py b/packages/httpcore/_async/http.py deleted file mode 100644 index 06270f0f0..000000000 --- a/packages/httpcore/_async/http.py +++ /dev/null @@ -1,42 +0,0 @@ -from ssl import SSLContext - -from .._backends.auto import AsyncSocketStream -from .._types import TimeoutDict -from .base import AsyncHTTPTransport - - -class AsyncBaseHTTPConnection(AsyncHTTPTransport): - def info(self) -> str: - raise NotImplementedError() # pragma: nocover - - def should_close(self) -> bool: - """ - Return `True` if the connection is in a state where it should be closed. - """ - raise NotImplementedError() # pragma: nocover - - def is_idle(self) -> bool: - """ - Return `True` if the connection is currently idle. - """ - raise NotImplementedError() # pragma: nocover - - def is_closed(self) -> bool: - """ - Return `True` if the connection has been closed. - """ - raise NotImplementedError() # pragma: nocover - - def is_available(self) -> bool: - """ - Return `True` if the connection is currently able to accept an outgoing request. - """ - raise NotImplementedError() # pragma: nocover - - async def start_tls( - self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None - ) -> AsyncSocketStream: - """ - Upgrade the underlying socket to TLS. 
- """ - raise NotImplementedError() # pragma: nocover diff --git a/packages/httpcore/_async/http11.py b/packages/httpcore/_async/http11.py index a265657c6..32fa3a6f2 100644 --- a/packages/httpcore/_async/http11.py +++ b/packages/httpcore/_async/http11.py @@ -1,194 +1,186 @@ import enum +import logging import time -from ssl import SSLContext -from typing import AsyncIterator, List, Optional, Tuple, Union, cast +from types import TracebackType +from typing import ( + AsyncIterable, + AsyncIterator, + List, + Optional, + Tuple, + Type, + Union, + cast, +) import h11 -from .._backends.auto import AsyncSocketStream -from .._bytestreams import AsyncIteratorByteStream -from .._exceptions import LocalProtocolError, RemoteProtocolError, map_exceptions -from .._types import URL, Headers, TimeoutDict -from .._utils import get_logger -from .base import AsyncByteStream, NewConnectionRequired -from .http import AsyncBaseHTTPConnection +from .._backends.base import AsyncNetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, + WriteError, + map_exceptions, +) +from .._models import Origin, Request, Response +from .._synchronization import AsyncLock, AsyncShieldCancellation +from .._trace import Trace +from .interfaces import AsyncConnectionInterface -H11Event = Union[ +logger = logging.getLogger("httpcore.http11") + + +# A subset of `h11.Event` types supported by `_send_event` +H11SendEvent = Union[ h11.Request, - h11.Response, - h11.InformationalResponse, h11.Data, h11.EndOfMessage, - h11.ConnectionClosed, ] -class ConnectionState(enum.IntEnum): +class HTTPConnectionState(enum.IntEnum): NEW = 0 ACTIVE = 1 IDLE = 2 CLOSED = 3 -logger = get_logger(__name__) - - -class AsyncHTTP11Connection(AsyncBaseHTTPConnection): +class AsyncHTTP11Connection(AsyncConnectionInterface): READ_NUM_BYTES = 64 * 1024 + MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 - def __init__(self, socket: AsyncSocketStream, keepalive_expiry: float = None): - self.socket = socket - + def __init__( + self, + origin: Origin, + stream: AsyncNetworkStream, + keepalive_expiry: Optional[float] = None, + ) -> None: + self._origin = origin + self._network_stream = stream self._keepalive_expiry: Optional[float] = keepalive_expiry - self._should_expire_at: Optional[float] = None - self._h11_state = h11.Connection(our_role=h11.CLIENT) - self._state = ConnectionState.NEW - - def __repr__(self) -> str: - return f"" - - def _now(self) -> float: - return time.monotonic() - - def _server_disconnected(self) -> bool: - """ - Return True if the connection is idle, and the underlying socket is readable. - The only valid state the socket can be readable here is when the b"" - EOF marker is about to be returned, indicating a server disconnect. - """ - return self._state == ConnectionState.IDLE and self.socket.is_readable() - - def _keepalive_expired(self) -> bool: - """ - Return True if the connection is idle, and has passed it's keepalive - expiry time. 
- """ - return ( - self._state == ConnectionState.IDLE - and self._should_expire_at is not None - and self._now() >= self._should_expire_at + self._expire_at: Optional[float] = None + self._state = HTTPConnectionState.NEW + self._state_lock = AsyncLock() + self._request_count = 0 + self._h11_state = h11.Connection( + our_role=h11.CLIENT, + max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE, ) - def info(self) -> str: - return f"HTTP/1.1, {self._state.name}" - - def should_close(self) -> bool: - """ - Return `True` if the connection is in a state where it should be closed. - """ - return self._server_disconnected() or self._keepalive_expired() - - def is_idle(self) -> bool: - """ - Return `True` if the connection is currently idle. - """ - return self._state == ConnectionState.IDLE - - def is_closed(self) -> bool: - """ - Return `True` if the connection has been closed. - """ - return self._state == ConnectionState.CLOSED - - def is_available(self) -> bool: - """ - Return `True` if the connection is currently able to accept an outgoing request. - """ - return self._state == ConnectionState.IDLE + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + async with self._state_lock: + if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE): + self._request_count += 1 + self._state = HTTPConnectionState.ACTIVE + self._expire_at = None + else: + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request} + try: + async with Trace( + "send_request_headers", logger, request, kwargs + ) as trace: + await self._send_request_headers(**kwargs) + async with Trace("send_request_body", logger, request, kwargs) as trace: + await self._send_request_body(**kwargs) + except WriteError: + # If we get a write error while we're writing the request, + # then we supress this error and move on to attempting to + # read the response. Servers can sometimes close the request + # pre-emptively and then respond with a well formed HTTP + # error response. + pass + + async with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + ( + http_version, + status, + reason_phrase, + headers, + ) = await self._receive_response_headers(**kwargs) + trace.return_value = ( + http_version, + status, + reason_phrase, + headers, + ) + + return Response( + status=status, + headers=headers, + content=HTTP11ConnectionByteStream(self, request), + extensions={ + "http_version": http_version, + "reason_phrase": reason_phrase, + "network_stream": self._network_stream, + }, + ) + except BaseException as exc: + with AsyncShieldCancellation(): + async with Trace("response_closed", logger, request) as trace: + await self._response_closed() + raise exc + + # Sending the request... + + async def _send_request_headers(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) - async def handle_async_request( - self, - method: bytes, - url: URL, - headers: Headers, - stream: AsyncByteStream, - extensions: dict, - ) -> Tuple[int, Headers, AsyncByteStream, dict]: - """ - Send a single HTTP/1.1 request. - - Note that there is no kind of task/thread locking at this layer of interface. - Dealing with locking for concurrency is handled by the `AsyncHTTPConnection`. 
- """ - timeout = cast(TimeoutDict, extensions.get("timeout", {})) - - if self._state in (ConnectionState.NEW, ConnectionState.IDLE): - self._state = ConnectionState.ACTIVE - self._should_expire_at = None - else: - raise NewConnectionRequired() - - await self._send_request(method, url, headers, timeout) - await self._send_request_body(stream, timeout) - ( - http_version, - status_code, - reason_phrase, - headers, - ) = await self._receive_response(timeout) - response_stream = AsyncIteratorByteStream( - aiterator=self._receive_response_data(timeout), - aclose_func=self._response_closed, - ) - extensions = { - "http_version": http_version, - "reason_phrase": reason_phrase, - } - return (status_code, headers, response_stream, extensions) - - async def start_tls( - self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None - ) -> AsyncSocketStream: - timeout = {} if timeout is None else timeout - self.socket = await self.socket.start_tls(hostname, ssl_context, timeout) - return self.socket - - async def _send_request( - self, method: bytes, url: URL, headers: Headers, timeout: TimeoutDict - ) -> None: - """ - Send the request line and headers. - """ - logger.trace("send_request method=%r url=%r headers=%s", method, url, headers) - _scheme, _host, _port, target = url with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): - event = h11.Request(method=method, target=target, headers=headers) - await self._send_event(event, timeout) - - async def _send_request_body( - self, stream: AsyncByteStream, timeout: TimeoutDict - ) -> None: - """ - Send the request body. - """ - # Send the request body. - async for chunk in stream: - logger.trace("send_data=Data(<%d bytes>)", len(chunk)) + event = h11.Request( + method=request.method, + target=request.url.target, + headers=request.headers, + ) + await self._send_event(event, timeout=timeout) + + async def _send_request_body(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + assert isinstance(request.stream, AsyncIterable) + async for chunk in request.stream: event = h11.Data(data=chunk) - await self._send_event(event, timeout) + await self._send_event(event, timeout=timeout) - # Finalize sending the request. - event = h11.EndOfMessage() - await self._send_event(event, timeout) + await self._send_event(h11.EndOfMessage(), timeout=timeout) - async def _send_event(self, event: H11Event, timeout: TimeoutDict) -> None: - """ - Send a single `h11` event to the network, waiting for the data to - drain before returning. - """ + async def _send_event( + self, event: h11.Event, timeout: Optional[float] = None + ) -> None: bytes_to_send = self._h11_state.send(event) - await self.socket.write(bytes_to_send, timeout) + if bytes_to_send is not None: + await self._network_stream.write(bytes_to_send, timeout=timeout) - async def _receive_response( - self, timeout: TimeoutDict + # Receiving the response... + + async def _receive_response_headers( + self, request: Request ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]]]: - """ - Read the response status and headers from the network. 
- """ + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + while True: - event = await self._receive_event(timeout) + event = await self._receive_event(timeout=timeout) if isinstance(event, h11.Response): break + if ( + isinstance(event, h11.InformationalResponse) + and event.status_code == 101 + ): + break http_version = b"HTTP/" + event.http_version @@ -198,31 +190,28 @@ async def _receive_response( return http_version, event.status_code, event.reason, headers - async def _receive_response_data( - self, timeout: TimeoutDict - ) -> AsyncIterator[bytes]: - """ - Read the response data from the network. - """ + async def _receive_response_body(self, request: Request) -> AsyncIterator[bytes]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + while True: - event = await self._receive_event(timeout) + event = await self._receive_event(timeout=timeout) if isinstance(event, h11.Data): - logger.trace("receive_event=Data(<%d bytes>)", len(event.data)) yield bytes(event.data) elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): - logger.trace("receive_event=%r", event) break - async def _receive_event(self, timeout: TimeoutDict) -> H11Event: - """ - Read a single `h11` event, reading more data from the network if needed. - """ + async def _receive_event( + self, timeout: Optional[float] = None + ) -> Union[h11.Event, Type[h11.PAUSED]]: while True: with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): event = self._h11_state.next_event() if event is h11.NEED_DATA: - data = await self.socket.read(self.READ_NUM_BYTES, timeout) + data = await self._network_stream.read( + self.READ_NUM_BYTES, timeout=timeout + ) # If we feed this case through h11 we'll raise an exception like: # @@ -230,40 +219,125 @@ async def _receive_event(self, timeout: TimeoutDict) -> H11Event: # ConnectionClosed when role=SERVER and state=SEND_RESPONSE # # Which is accurate, but not very informative from an end-user - # perspective. Instead we handle messaging for this case distinctly. + # perspective. Instead we handle this case distinctly and treat + # it as a ConnectError. if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: msg = "Server disconnected without sending a response." raise RemoteProtocolError(msg) self._h11_state.receive_data(data) else: - assert event is not h11.NEED_DATA - break - return event + # mypy fails to narrow the type in the above if statement above + return cast(Union[h11.Event, Type[h11.PAUSED]], event) async def _response_closed(self) -> None: - logger.trace( - "response_closed our_state=%r their_state=%r", - self._h11_state.our_state, - self._h11_state.their_state, - ) - if ( - self._h11_state.our_state is h11.DONE - and self._h11_state.their_state is h11.DONE - ): - self._h11_state.start_next_cycle() - self._state = ConnectionState.IDLE - if self._keepalive_expiry is not None: - self._should_expire_at = self._now() + self._keepalive_expiry - else: - await self.aclose() + async with self._state_lock: + if ( + self._h11_state.our_state is h11.DONE + and self._h11_state.their_state is h11.DONE + ): + self._state = HTTPConnectionState.IDLE + self._h11_state.start_next_cycle() + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + else: + await self.aclose() + + # Once the connection is no longer required... 
async def aclose(self) -> None: - if self._state != ConnectionState.CLOSED: - self._state = ConnectionState.CLOSED + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._state = HTTPConnectionState.CLOSED + await self._network_stream.aclose() + + # The AsyncConnectionInterface methods provide information about the state of + # the connection, allowing for a connection pooling implementation to + # determine when to reuse and when to close the connection... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + # Note that HTTP/1.1 connections in the "NEW" state are not treated as + # being "available". The control flow which created the connection will + # be able to send an outgoing request, but the connection will not be + # acquired from the connection pool for any other request. + return self._state == HTTPConnectionState.IDLE + + def has_expired(self) -> bool: + now = time.monotonic() + keepalive_expired = self._expire_at is not None and now > self._expire_at + + # If the HTTP connection is idle but the socket is readable, then the + # only valid state is that the socket is about to return b"", indicating + # a server-initiated disconnect. + server_disconnected = ( + self._state == HTTPConnectionState.IDLE + and self._network_stream.get_extra_info("is_readable") + ) + + return keepalive_expired or server_disconnected + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/1.1, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) - if self._h11_state.our_state is h11.MUST_CLOSE: - event = h11.ConnectionClosed() - self._h11_state.send(event) + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. - await self.socket.aclose() + async def __aenter__(self) -> "AsyncHTTP11Connection": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + await self.aclose() + + +class HTTP11ConnectionByteStream: + def __init__(self, connection: AsyncHTTP11Connection, request: Request) -> None: + self._connection = connection + self._request = request + self._closed = False + + async def __aiter__(self) -> AsyncIterator[bytes]: + kwargs = {"request": self._request} + try: + async with Trace("receive_response_body", logger, self._request, kwargs): + async for chunk in self._connection._receive_response_body(**kwargs): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. 
+ with AsyncShieldCancellation(): + await self.aclose() + raise exc + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + async with Trace("response_closed", logger, self._request): + await self._connection._response_closed() diff --git a/packages/httpcore/_async/http2.py b/packages/httpcore/_async/http2.py index 35a4e0911..8dc776ffa 100644 --- a/packages/httpcore/_async/http2.py +++ b/packages/httpcore/_async/http2.py @@ -1,175 +1,190 @@ import enum +import logging import time -from ssl import SSLContext -from typing import AsyncIterator, Dict, List, Optional, Tuple, cast +import types +import typing +import h2.config import h2.connection import h2.events -from h2.config import H2Configuration -from h2.exceptions import NoAvailableStreamIDError -from h2.settings import SettingCodes, Settings +import h2.exceptions +import h2.settings -from .._backends.auto import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream -from .._bytestreams import AsyncIteratorByteStream -from .._exceptions import LocalProtocolError, PoolTimeout, RemoteProtocolError -from .._types import URL, Headers, TimeoutDict -from .._utils import get_logger -from .base import AsyncByteStream, NewConnectionRequired -from .http import AsyncBaseHTTPConnection +from .._backends.base import AsyncNetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, +) +from .._models import Origin, Request, Response +from .._synchronization import AsyncLock, AsyncSemaphore, AsyncShieldCancellation +from .._trace import Trace +from .interfaces import AsyncConnectionInterface -logger = get_logger(__name__) +logger = logging.getLogger("httpcore.http2") -class ConnectionState(enum.IntEnum): - IDLE = 0 +def has_body_headers(request: Request) -> bool: + return any( + k.lower() == b"content-length" or k.lower() == b"transfer-encoding" + for k, v in request.headers + ) + + +class HTTPConnectionState(enum.IntEnum): ACTIVE = 1 - CLOSED = 2 + IDLE = 2 + CLOSED = 3 -class AsyncHTTP2Connection(AsyncBaseHTTPConnection): +class AsyncHTTP2Connection(AsyncConnectionInterface): READ_NUM_BYTES = 64 * 1024 - CONFIG = H2Configuration(validate_inbound_headers=False) + CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) def __init__( self, - socket: AsyncSocketStream, - backend: AsyncBackend, - keepalive_expiry: float = None, + origin: Origin, + stream: AsyncNetworkStream, + keepalive_expiry: typing.Optional[float] = None, ): - self.socket = socket - - self._backend = backend + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: typing.Optional[float] = keepalive_expiry self._h2_state = h2.connection.H2Connection(config=self.CONFIG) - + self._state = HTTPConnectionState.IDLE + self._expire_at: typing.Optional[float] = None + self._request_count = 0 + self._init_lock = AsyncLock() + self._state_lock = AsyncLock() + self._read_lock = AsyncLock() + self._write_lock = AsyncLock() self._sent_connection_init = False - self._streams: Dict[int, AsyncHTTP2Stream] = {} - self._events: Dict[int, List[h2.events.Event]] = {} - - self._keepalive_expiry: Optional[float] = keepalive_expiry - self._should_expire_at: Optional[float] = None - self._state = ConnectionState.ACTIVE - self._exhausted_available_stream_ids = False - - def __repr__(self) -> str: - return f"" - - def info(self) -> str: - return f"HTTP/2, {self._state.name}, {len(self._streams)} streams" - - def _now(self) -> float: - return time.monotonic() + self._used_all_stream_ids = 
False + self._connection_error = False + + # Mapping from stream ID to response stream events. + self._events: typing.Dict[ + int, + typing.Union[ + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ], + ] = {} + + # Connection terminated events are stored as state since + # we need to handle them for all streams. + self._connection_terminated: typing.Optional[ + h2.events.ConnectionTerminated + ] = None + + self._read_exception: typing.Optional[Exception] = None + self._write_exception: typing.Optional[Exception] = None + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + # This cannot occur in normal operation, since the connection pool + # will only send requests on connections that handle them. + # It's in place simply for resilience as a guard against incorrect + # usage, for anyone working directly with httpcore connections. + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) - def should_close(self) -> bool: - """ - Return `True` if the connection is currently idle, and the keepalive - timeout has passed. - """ - return ( - self._state == ConnectionState.IDLE - and self._should_expire_at is not None - and self._now() >= self._should_expire_at - ) + async with self._state_lock: + if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): + self._request_count += 1 + self._expire_at = None + self._state = HTTPConnectionState.ACTIVE + else: + raise ConnectionNotAvailable() - def is_idle(self) -> bool: - """ - Return `True` if the connection is currently idle. - """ - return self._state == ConnectionState.IDLE + async with self._init_lock: + if not self._sent_connection_init: + try: + kwargs = {"request": request} + async with Trace("send_connection_init", logger, request, kwargs): + await self._send_connection_init(**kwargs) + except BaseException as exc: + with AsyncShieldCancellation(): + await self.aclose() + raise exc - def is_closed(self) -> bool: - """ - Return `True` if the connection has been closed. - """ - return self._state == ConnectionState.CLOSED + self._sent_connection_init = True - def is_available(self) -> bool: - """ - Return `True` if the connection is currently able to accept an outgoing request. - This occurs when any of the following occur: - - * The connection has not yet been opened, and HTTP/2 support is enabled. - We don't *know* at this point if we'll end up on an HTTP/2 connection or - not, but we *might* do, so we indicate availability. - * The connection has been opened, and is currently idle. - * The connection is open, and is an HTTP/2 connection. The connection must - also not have exhausted the maximum total number of stream IDs. - """ - return ( - self._state != ConnectionState.CLOSED - and not self._exhausted_available_stream_ids - ) + # Initially start with just 1 until the remote server provides + # its max_concurrent_streams value + self._max_streams = 1 - @property - def init_lock(self) -> AsyncLock: - # We do this lazily, to make sure backend autodetection always - # runs within an async context. - if not hasattr(self, "_initialization_lock"): - self._initialization_lock = self._backend.create_lock() - return self._initialization_lock - - @property - def read_lock(self) -> AsyncLock: - # We do this lazily, to make sure backend autodetection always - # runs within an async context. 
- if not hasattr(self, "_read_lock"): - self._read_lock = self._backend.create_lock() - return self._read_lock - - @property - def max_streams_semaphore(self) -> AsyncSemaphore: - # We do this lazily, to make sure backend autodetection always - # runs within an async context. - if not hasattr(self, "_max_streams_semaphore"): - max_streams = self._h2_state.local_settings.max_concurrent_streams - self._max_streams_semaphore = self._backend.create_semaphore( - max_streams, exc_class=PoolTimeout - ) - return self._max_streams_semaphore + local_settings_max_streams = ( + self._h2_state.local_settings.max_concurrent_streams + ) + self._max_streams_semaphore = AsyncSemaphore(local_settings_max_streams) - async def start_tls( - self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None - ) -> AsyncSocketStream: - raise NotImplementedError("TLS upgrade not supported on HTTP/2 connections.") + for _ in range(local_settings_max_streams - self._max_streams): + await self._max_streams_semaphore.acquire() - async def handle_async_request( - self, - method: bytes, - url: URL, - headers: Headers, - stream: AsyncByteStream, - extensions: dict, - ) -> Tuple[int, Headers, AsyncByteStream, dict]: - timeout = cast(TimeoutDict, extensions.get("timeout", {})) - - async with self.init_lock: - if not self._sent_connection_init: - # The very first stream is responsible for initiating the connection. - self._state = ConnectionState.ACTIVE - await self.send_connection_init(timeout) - self._sent_connection_init = True + await self._max_streams_semaphore.acquire() - await self.max_streams_semaphore.acquire() try: - try: - stream_id = self._h2_state.get_next_available_stream_id() - except NoAvailableStreamIDError: - self._exhausted_available_stream_ids = True - raise NewConnectionRequired() - else: - self._state = ConnectionState.ACTIVE - self._should_expire_at = None - - h2_stream = AsyncHTTP2Stream(stream_id=stream_id, connection=self) - self._streams[stream_id] = h2_stream + stream_id = self._h2_state.get_next_available_stream_id() self._events[stream_id] = [] - return await h2_stream.handle_async_request( - method, url, headers, stream, extensions - ) - except Exception: # noqa: PIE786 - await self.max_streams_semaphore.release() - raise + except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover + self._used_all_stream_ids = True + self._request_count -= 1 + raise ConnectionNotAvailable() - async def send_connection_init(self, timeout: TimeoutDict) -> None: + try: + kwargs = {"request": request, "stream_id": stream_id} + async with Trace("send_request_headers", logger, request, kwargs): + await self._send_request_headers(request=request, stream_id=stream_id) + async with Trace("send_request_body", logger, request, kwargs): + await self._send_request_body(request=request, stream_id=stream_id) + async with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + status, headers = await self._receive_response( + request=request, stream_id=stream_id + ) + trace.return_value = (status, headers) + + return Response( + status=status, + headers=headers, + content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), + extensions={ + "http_version": b"HTTP/2", + "network_stream": self._network_stream, + "stream_id": stream_id, + }, + ) + except BaseException as exc: # noqa: PIE786 + with AsyncShieldCancellation(): + kwargs = {"stream_id": stream_id} + async with Trace("response_closed", logger, request, kwargs): + await self._response_closed(stream_id=stream_id) + + if 
isinstance(exc, h2.exceptions.ProtocolError): + # One case where h2 can raise a protocol error is when a + # closed frame has been seen by the state machine. + # + # This happens when one stream is reading, and encounters + # a GOAWAY event. Other flows of control may then raise + # a protocol error at any point they interact with the 'h2_state'. + # + # In this case we'll have stored the event, and should raise + # it as a RemoteProtocolError. + if self._connection_terminated: # pragma: nocover + raise RemoteProtocolError(self._connection_terminated) + # If h2 raises a protocol error in some other state then we + # must somehow have made a protocol violation. + raise LocalProtocolError(exc) # pragma: nocover + + raise exc + + async def _send_connection_init(self, request: Request) -> None: """ The HTTP/2 connection requires some initial setup before we can start using individual request/response streams on it. @@ -177,15 +192,15 @@ async def send_connection_init(self, timeout: TimeoutDict) -> None: # Need to set these manually here instead of manipulating via # __setitem__() otherwise the H2Connection will emit SettingsUpdate # frames in addition to sending the undesired defaults. - self._h2_state.local_settings = Settings( + self._h2_state.local_settings = h2.settings.Settings( client=True, initial_values={ # Disable PUSH_PROMISE frames from the server since we don't do anything # with them for now. Maybe when we support caching? - SettingCodes.ENABLE_PUSH: 0, + h2.settings.SettingCodes.ENABLE_PUSH: 0, # These two are taken from h2 for safe defaults - SettingCodes.MAX_CONCURRENT_STREAMS: 100, - SettingCodes.MAX_HEADER_LIST_SIZE: 65536, + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, + h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, }, ) @@ -196,227 +211,85 @@ async def send_connection_init(self, timeout: TimeoutDict) -> None: h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL ] - logger.trace("initiate_connection=%r", self) self._h2_state.initiate_connection() - self._h2_state.increment_flow_control_window(2 ** 24) - data_to_send = self._h2_state.data_to_send() - await self.socket.write(data_to_send, timeout) - - def is_socket_readable(self) -> bool: - return self.socket.is_readable() - - async def aclose(self) -> None: - logger.trace("close_connection=%r", self) - if self._state != ConnectionState.CLOSED: - self._state = ConnectionState.CLOSED - - await self.socket.aclose() - - async def wait_for_outgoing_flow(self, stream_id: int, timeout: TimeoutDict) -> int: - """ - Returns the maximum allowable outgoing flow for a given stream. - If the allowable flow is zero, then waits on the network until - WindowUpdated frames have increased the flow rate. - https://tools.ietf.org/html/rfc7540#section-6.9 - """ - local_flow = self._h2_state.local_flow_control_window(stream_id) - connection_flow = self._h2_state.max_outbound_frame_size - flow = min(local_flow, connection_flow) - while flow == 0: - await self.receive_events(timeout) - local_flow = self._h2_state.local_flow_control_window(stream_id) - connection_flow = self._h2_state.max_outbound_frame_size - flow = min(local_flow, connection_flow) - return flow + self._h2_state.increment_flow_control_window(2**24) + await self._write_outgoing_data(request) - async def wait_for_event( - self, stream_id: int, timeout: TimeoutDict - ) -> h2.events.Event: - """ - Returns the next event for a given stream. - If no events are available yet, then waits on the network until - an event is available. 
- """ - async with self.read_lock: - while not self._events[stream_id]: - await self.receive_events(timeout) - return self._events[stream_id].pop(0) + # Sending the request... - async def receive_events(self, timeout: TimeoutDict) -> None: + async def _send_request_headers(self, request: Request, stream_id: int) -> None: """ - Read some data from the network, and update the H2 state. + Send the request headers to a given stream ID. """ - data = await self.socket.read(self.READ_NUM_BYTES, timeout) - if data == b"": - raise RemoteProtocolError("Server disconnected") - - events = self._h2_state.receive_data(data) - for event in events: - event_stream_id = getattr(event, "stream_id", 0) - logger.trace("receive_event stream_id=%r event=%s", event_stream_id, event) - - if hasattr(event, "error_code"): - raise RemoteProtocolError(event) - - if event_stream_id in self._events: - self._events[event_stream_id].append(event) - - data_to_send = self._h2_state.data_to_send() - await self.socket.write(data_to_send, timeout) - - async def send_headers( - self, stream_id: int, headers: Headers, end_stream: bool, timeout: TimeoutDict - ) -> None: - logger.trace("send_headers stream_id=%r headers=%r", stream_id, headers) - self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) - self._h2_state.increment_flow_control_window(2 ** 24, stream_id=stream_id) - data_to_send = self._h2_state.data_to_send() - await self.socket.write(data_to_send, timeout) - - async def send_data( - self, stream_id: int, chunk: bytes, timeout: TimeoutDict - ) -> None: - logger.trace("send_data stream_id=%r chunk=%r", stream_id, chunk) - self._h2_state.send_data(stream_id, chunk) - data_to_send = self._h2_state.data_to_send() - await self.socket.write(data_to_send, timeout) - - async def end_stream(self, stream_id: int, timeout: TimeoutDict) -> None: - logger.trace("end_stream stream_id=%r", stream_id) - self._h2_state.end_stream(stream_id) - data_to_send = self._h2_state.data_to_send() - await self.socket.write(data_to_send, timeout) - - async def acknowledge_received_data( - self, stream_id: int, amount: int, timeout: TimeoutDict - ) -> None: - self._h2_state.acknowledge_received_data(amount, stream_id) - data_to_send = self._h2_state.data_to_send() - await self.socket.write(data_to_send, timeout) - - async def close_stream(self, stream_id: int) -> None: - try: - logger.trace("close_stream stream_id=%r", stream_id) - del self._streams[stream_id] - del self._events[stream_id] - - if not self._streams: - if self._state == ConnectionState.ACTIVE: - if self._exhausted_available_stream_ids: - await self.aclose() - else: - self._state = ConnectionState.IDLE - if self._keepalive_expiry is not None: - self._should_expire_at = ( - self._now() + self._keepalive_expiry - ) - finally: - await self.max_streams_semaphore.release() - - -class AsyncHTTP2Stream: - def __init__(self, stream_id: int, connection: AsyncHTTP2Connection) -> None: - self.stream_id = stream_id - self.connection = connection - - async def handle_async_request( - self, - method: bytes, - url: URL, - headers: Headers, - stream: AsyncByteStream, - extensions: dict, - ) -> Tuple[int, Headers, AsyncByteStream, dict]: - headers = [(k.lower(), v) for (k, v) in headers] - timeout = cast(TimeoutDict, extensions.get("timeout", {})) - - # Send the request. 
- seen_headers = set(key for key, value in headers) - has_body = ( - b"content-length" in seen_headers or b"transfer-encoding" in seen_headers - ) - - await self.send_headers(method, url, headers, has_body, timeout) - if has_body: - await self.send_body(stream, timeout) - - # Receive the response. - status_code, headers = await self.receive_response(timeout) - response_stream = AsyncIteratorByteStream( - aiterator=self.body_iter(timeout), aclose_func=self._response_closed - ) - - extensions = { - "http_version": b"HTTP/2", - } - return (status_code, headers, response_stream, extensions) - - async def send_headers( - self, - method: bytes, - url: URL, - headers: Headers, - has_body: bool, - timeout: TimeoutDict, - ) -> None: - scheme, hostname, port, path = url + end_stream = not has_body_headers(request) # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require # HTTP/1.1 style headers, and map them appropriately if we end up on # an HTTP/2 connection. - authority = None - - for k, v in headers: - if k == b"host": - authority = v - break - - if authority is None: - # Mirror the same error we'd see with `h11`, so that the behaviour - # is consistent. Although we're dealing with an `:authority` - # pseudo-header by this point, from an end-user perspective the issue - # is that the outgoing request needed to include a `host` header. - raise LocalProtocolError("Missing mandatory Host: header") + authority = [v for k, v in request.headers if k.lower() == b"host"][0] headers = [ - (b":method", method), + (b":method", request.method), (b":authority", authority), - (b":scheme", scheme), - (b":path", path), + (b":scheme", request.url.scheme), + (b":path", request.url.target), ] + [ - (k, v) - for k, v in headers - if k + (k.lower(), v) + for k, v in request.headers + if k.lower() not in ( b"host", b"transfer-encoding", ) ] - end_stream = not has_body - await self.connection.send_headers(self.stream_id, headers, end_stream, timeout) + self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) + self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) + await self._write_outgoing_data(request) - async def send_body(self, stream: AsyncByteStream, timeout: TimeoutDict) -> None: - async for data in stream: - while data: - max_flow = await self.connection.wait_for_outgoing_flow( - self.stream_id, timeout - ) - chunk_size = min(len(data), max_flow) - chunk, data = data[:chunk_size], data[chunk_size:] - await self.connection.send_data(self.stream_id, chunk, timeout) + async def _send_request_body(self, request: Request, stream_id: int) -> None: + """ + Iterate over the request body sending it to a given stream ID. + """ + if not has_body_headers(request): + return - await self.connection.end_stream(self.stream_id, timeout) + assert isinstance(request.stream, typing.AsyncIterable) + async for data in request.stream: + await self._send_stream_data(request, stream_id, data) + await self._send_end_stream(request, stream_id) - async def receive_response( - self, timeout: TimeoutDict - ) -> Tuple[int, List[Tuple[bytes, bytes]]]: + async def _send_stream_data( + self, request: Request, stream_id: int, data: bytes + ) -> None: """ - Read the response status and headers from the network. + Send a single chunk of data in one or more data frames. 
+ """ + while data: + max_flow = await self._wait_for_outgoing_flow(request, stream_id) + chunk_size = min(len(data), max_flow) + chunk, data = data[:chunk_size], data[chunk_size:] + self._h2_state.send_data(stream_id, chunk) + await self._write_outgoing_data(request) + + async def _send_end_stream(self, request: Request, stream_id: int) -> None: + """ + Send an empty data frame on on a given stream ID with the END_STREAM flag set. + """ + self._h2_state.end_stream(stream_id) + await self._write_outgoing_data(request) + + # Receiving the response... + + async def _receive_response( + self, request: Request, stream_id: int + ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]: + """ + Return the response status code and headers for a given stream ID. """ while True: - event = await self.connection.wait_for_event(self.stream_id, timeout) + event = await self._receive_stream_event(request, stream_id) if isinstance(event, h2.events.ResponseReceived): break @@ -430,17 +303,287 @@ async def receive_response( return (status_code, headers) - async def body_iter(self, timeout: TimeoutDict) -> AsyncIterator[bytes]: + async def _receive_response_body( + self, request: Request, stream_id: int + ) -> typing.AsyncIterator[bytes]: + """ + Iterator that returns the bytes of the response body for a given stream ID. + """ while True: - event = await self.connection.wait_for_event(self.stream_id, timeout) + event = await self._receive_stream_event(request, stream_id) if isinstance(event, h2.events.DataReceived): amount = event.flow_controlled_length - await self.connection.acknowledge_received_data( - self.stream_id, amount, timeout - ) + self._h2_state.acknowledge_received_data(amount, stream_id) + await self._write_outgoing_data(request) yield event.data - elif isinstance(event, (h2.events.StreamEnded, h2.events.StreamReset)): + elif isinstance(event, h2.events.StreamEnded): break - async def _response_closed(self) -> None: - await self.connection.close_stream(self.stream_id) + async def _receive_stream_event( + self, request: Request, stream_id: int + ) -> typing.Union[ + h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded + ]: + """ + Return the next available event for a given stream ID. + + Will read more data from the network if required. + """ + while not self._events.get(stream_id): + await self._receive_events(request, stream_id) + event = self._events[stream_id].pop(0) + if isinstance(event, h2.events.StreamReset): + raise RemoteProtocolError(event) + return event + + async def _receive_events( + self, request: Request, stream_id: typing.Optional[int] = None + ) -> None: + """ + Read some data from the network until we see one or more events + for a given stream ID. + """ + async with self._read_lock: + if self._connection_terminated is not None: + last_stream_id = self._connection_terminated.last_stream_id + if stream_id and last_stream_id and stream_id > last_stream_id: + self._request_count -= 1 + raise ConnectionNotAvailable() + raise RemoteProtocolError(self._connection_terminated) + + # This conditional is a bit icky. We don't want to block reading if we've + # actually got an event to return for a given stream. We need to do that + # check *within* the atomic read lock. Though it also need to be optional, + # because when we call it from `_wait_for_outgoing_flow` we *do* want to + # block until we've available flow control, event when we have events + # pending for the stream ID we're attempting to send on. 
+ if stream_id is None or not self._events.get(stream_id): + events = await self._read_incoming_data(request) + for event in events: + if isinstance(event, h2.events.RemoteSettingsChanged): + async with Trace( + "receive_remote_settings", logger, request + ) as trace: + await self._receive_remote_settings_change(event) + trace.return_value = event + + elif isinstance( + event, + ( + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ), + ): + if event.stream_id in self._events: + self._events[event.stream_id].append(event) + + elif isinstance(event, h2.events.ConnectionTerminated): + self._connection_terminated = event + + await self._write_outgoing_data(request) + + async def _receive_remote_settings_change(self, event: h2.events.Event) -> None: + max_concurrent_streams = event.changed_settings.get( + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS + ) + if max_concurrent_streams: + new_max_streams = min( + max_concurrent_streams.new_value, + self._h2_state.local_settings.max_concurrent_streams, + ) + if new_max_streams and new_max_streams != self._max_streams: + while new_max_streams > self._max_streams: + await self._max_streams_semaphore.release() + self._max_streams += 1 + while new_max_streams < self._max_streams: + await self._max_streams_semaphore.acquire() + self._max_streams -= 1 + + async def _response_closed(self, stream_id: int) -> None: + await self._max_streams_semaphore.release() + del self._events[stream_id] + async with self._state_lock: + if self._connection_terminated and not self._events: + await self.aclose() + + elif self._state == HTTPConnectionState.ACTIVE and not self._events: + self._state = HTTPConnectionState.IDLE + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + if self._used_all_stream_ids: # pragma: nocover + await self.aclose() + + async def aclose(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._h2_state.close_connection() + self._state = HTTPConnectionState.CLOSED + await self._network_stream.aclose() + + # Wrappers around network read/write operations... + + async def _read_incoming_data( + self, request: Request + ) -> typing.List[h2.events.Event]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + if self._read_exception is not None: + raise self._read_exception # pragma: nocover + + try: + data = await self._network_stream.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + except Exception as exc: + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future reads. + # (For example, this means that a single read timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. 
+ self._read_exception = exc + self._connection_error = True + raise exc + + events: typing.List[h2.events.Event] = self._h2_state.receive_data(data) + + return events + + async def _write_outgoing_data(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + async with self._write_lock: + data_to_send = self._h2_state.data_to_send() + + if self._write_exception is not None: + raise self._write_exception # pragma: nocover + + try: + await self._network_stream.write(data_to_send, timeout) + except Exception as exc: # pragma: nocover + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future write. + # (For example, this means that a single write timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._write_exception = exc + self._connection_error = True + raise exc + + # Flow control... + + async def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow: int = self._h2_state.local_flow_control_window(stream_id) + max_frame_size: int = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + while flow == 0: + await self._receive_events(request) + local_flow = self._h2_state.local_flow_control_window(stream_id) + max_frame_size = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + return flow + + # Interface for connection pooling... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + return ( + self._state != HTTPConnectionState.CLOSED + and not self._connection_error + and not self._used_all_stream_ids + and not ( + self._h2_state.state_machine.state + == h2.connection.ConnectionState.CLOSED + ) + ) + + def has_expired(self) -> bool: + now = time.monotonic() + return self._expire_at is not None and now > self._expire_at + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/2, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
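# A usage sketch for the context-manager interface that follows (assumes an
# already-established `AsyncNetworkStream` in `stream` and a target `origin`;
# `request()` is inherited from `AsyncRequestInterface`):
#
#     async with AsyncHTTP2Connection(origin=origin, stream=stream) as conn:
#         response = await conn.request("GET", "https://www.example.com/")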
+ + async def __aenter__(self) -> "AsyncHTTP2Connection": + return self + + async def __aexit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[types.TracebackType] = None, + ) -> None: + await self.aclose() + + +class HTTP2ConnectionByteStream: + def __init__( + self, connection: AsyncHTTP2Connection, request: Request, stream_id: int + ) -> None: + self._connection = connection + self._request = request + self._stream_id = stream_id + self._closed = False + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + kwargs = {"request": self._request, "stream_id": self._stream_id} + try: + async with Trace("receive_response_body", logger, self._request, kwargs): + async for chunk in self._connection._receive_response_body( + request=self._request, stream_id=self._stream_id + ): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with AsyncShieldCancellation(): + await self.aclose() + raise exc + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + kwargs = {"stream_id": self._stream_id} + async with Trace("response_closed", logger, self._request, kwargs): + await self._connection._response_closed(stream_id=self._stream_id) diff --git a/packages/httpcore/_async/http_proxy.py b/packages/httpcore/_async/http_proxy.py index 275bf214f..4aa7d8741 100644 --- a/packages/httpcore/_async/http_proxy.py +++ b/packages/httpcore/_async/http_proxy.py @@ -1,35 +1,45 @@ -from http import HTTPStatus -from ssl import SSLContext -from typing import Tuple, cast +import logging +import ssl +from base64 import b64encode +from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union -from .._bytestreams import ByteStream +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend from .._exceptions import ProxyError -from .._types import URL, Headers, TimeoutDict -from .._utils import get_logger, url_to_origin -from .base import AsyncByteStream +from .._models import ( + URL, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, +) +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace from .connection import AsyncHTTPConnection -from .connection_pool import AsyncConnectionPool, ResponseByteStream +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface -logger = get_logger(__name__) +HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] +HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] -def get_reason_phrase(status_code: int) -> str: - try: - return HTTPStatus(status_code).phrase - except ValueError: - return "" +logger = logging.getLogger("httpcore.proxy") def merge_headers( - default_headers: Headers = None, override_headers: Headers = None -) -> Headers: + default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, + override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, +) -> List[Tuple[bytes, bytes]]: """ - Append default_headers and override_headers, de-duplicating if a key existing in - both cases. + Append default_headers and override_headers, de-duplicating if a key exists + in both cases. 
""" - default_headers = [] if default_headers is None else default_headers - override_headers = [] if override_headers is None else override_headers - has_override = set([key.lower() for key, value in override_headers]) + default_headers = [] if default_headers is None else list(default_headers) + override_headers = [] if override_headers is None else list(override_headers) + has_override = set(key.lower() for key, value in override_headers) default_headers = [ (key, value) for key, value in default_headers @@ -38,253 +48,321 @@ def merge_headers( return default_headers + override_headers +def build_auth_header(username: bytes, password: bytes) -> bytes: + userpass = username + b":" + password + return b"Basic " + b64encode(userpass) + + class AsyncHTTPProxy(AsyncConnectionPool): """ - A connection pool for making HTTP requests via an HTTP proxy. - - Parameters - ---------- - proxy_url: - The URL of the proxy service as a 4-tuple of (scheme, host, port, path). - proxy_headers: - A list of proxy headers to include. - proxy_mode: - A proxy mode to operate in. May be "DEFAULT", "FORWARD_ONLY", or "TUNNEL_ONLY". - ssl_context: - An SSL context to use for verifying connections. - max_connections: - The maximum number of concurrent connections to allow. - max_keepalive_connections: - The maximum number of connections to allow before closing keep-alive - connections. - http2: - Enable HTTP/2 support. + A connection pool that sends requests via an HTTP proxy. """ def __init__( self, - proxy_url: URL, - proxy_headers: Headers = None, - proxy_mode: str = "DEFAULT", - ssl_context: SSLContext = None, - max_connections: int = None, - max_keepalive_connections: int = None, - keepalive_expiry: float = None, + proxy_url: Union[URL, bytes, str], + proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None, + proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, + ssl_context: Optional[ssl.SSLContext] = None, + proxy_ssl_context: Optional[ssl.SSLContext] = None, + max_connections: Optional[int] = 10, + max_keepalive_connections: Optional[int] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, http2: bool = False, - backend: str = "auto", - # Deprecated argument style: - max_keepalive: int = None, - ): - assert proxy_mode in ("DEFAULT", "FORWARD_ONLY", "TUNNEL_ONLY") - - self.proxy_origin = url_to_origin(proxy_url) - self.proxy_headers = [] if proxy_headers is None else proxy_headers - self.proxy_mode = proxy_mode + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + proxy_auth: Any proxy authentication as a two-tuple of + (username, password). May be either bytes or ascii-only str. + proxy_headers: Any HTTP headers to use for the proxy requests. + For example `{"Proxy-Authorization": "Basic :"}`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. 
Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + local_address: Local address to connect from. Can also be used to + connect using a particular address family. Using + `local_address="0.0.0.0"` will connect using an `AF_INET` address + (IPv4), while using `local_address="::"` will connect using an + `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. + """ super().__init__( ssl_context=ssl_context, max_connections=max_connections, max_keepalive_connections=max_keepalive_connections, keepalive_expiry=keepalive_expiry, + http1=http1, http2=http2, - backend=backend, - max_keepalive=max_keepalive, + network_backend=network_backend, + retries=retries, + local_address=local_address, + uds=uds, + socket_options=socket_options, ) - async def handle_async_request( - self, - method: bytes, - url: URL, - headers: Headers, - stream: AsyncByteStream, - extensions: dict, - ) -> Tuple[int, Headers, AsyncByteStream, dict]: - if self._keepalive_expiry is not None: - await self._keepalive_sweep() - + self._proxy_url = enforce_url(proxy_url, name="proxy_url") if ( - self.proxy_mode == "DEFAULT" and url[0] == b"http" - ) or self.proxy_mode == "FORWARD_ONLY": - # By default HTTP requests should be forwarded. - logger.trace( - "forward_request proxy_origin=%r proxy_headers=%r method=%r url=%r", - self.proxy_origin, - self.proxy_headers, - method, - url, - ) - return await self._forward_request( - method, url, headers=headers, stream=stream, extensions=extensions - ) - else: - # By default HTTPS should be tunnelled. - logger.trace( - "tunnel_request proxy_origin=%r proxy_headers=%r method=%r url=%r", - self.proxy_origin, - self.proxy_headers, - method, - url, - ) - return await self._tunnel_request( - method, url, headers=headers, stream=stream, extensions=extensions + self._proxy_url.scheme == b"http" and proxy_ssl_context is not None + ): # pragma: no cover + raise RuntimeError( + "The `proxy_ssl_context` argument is not allowed for the http scheme" ) - async def _forward_request( - self, - method: bytes, - url: URL, - headers: Headers, - stream: AsyncByteStream, - extensions: dict, - ) -> Tuple[int, Headers, AsyncByteStream, dict]: - """ - Forwarded proxy requests include the entire URL as the HTTP target, - rather than just the path. 
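# A sketch of the absolute-form request target described above, built with the
# same `httpcore.URL` model used by the new code (all values illustrative):
from httpcore import URL

# When forwarding, the full URL of the remote resource becomes the request
# target sent to the proxy, rather than just b"/path".
proxy_request_url = URL(
    scheme=b"http",
    host=b"127.0.0.1",  # the proxy host
    port=8080,          # the proxy port
    target=b"http://www.example.com/path",
)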
- """ - timeout = cast(TimeoutDict, extensions.get("timeout", {})) - origin = self.proxy_origin - connection = await self._get_connection_from_pool(origin) - - if connection is None: - connection = AsyncHTTPConnection( - origin=origin, - http2=self._http2, + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + if proxy_auth is not None: + username = enforce_bytes(proxy_auth[0], name="proxy_auth") + password = enforce_bytes(proxy_auth[1], name="proxy_auth") + authorization = build_auth_header(username, password) + self._proxy_headers = [ + (b"Proxy-Authorization", authorization) + ] + self._proxy_headers + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + if origin.scheme == b"http": + return AsyncForwardHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, keepalive_expiry=self._keepalive_expiry, - ssl_context=self._ssl_context, + network_backend=self._network_backend, + proxy_ssl_context=self._proxy_ssl_context, ) - await self._add_to_pool(connection, timeout) - - # Issue a forwarded proxy request... - - # GET https://www.example.org/path HTTP/1.1 - # [proxy headers] - # [headers] - scheme, host, port, path = url - if port is None: - target = b"%b://%b%b" % (scheme, host, path) - else: - target = b"%b://%b:%d%b" % (scheme, host, port, path) - - url = self.proxy_origin + (target,) - headers = merge_headers(self.proxy_headers, headers) - - ( - status_code, - headers, - stream, - extensions, - ) = await connection.handle_async_request( - method, url, headers=headers, stream=stream, extensions=extensions + return AsyncTunnelHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + ssl_context=self._ssl_context, + proxy_ssl_context=self._proxy_ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, ) - wrapped_stream = ResponseByteStream( - stream, connection=connection, callback=self._response_closed + +class AsyncForwardHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, + keepalive_expiry: Optional[float] = None, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + proxy_ssl_context: Optional[ssl.SSLContext] = None, + ) -> None: + self._connection = AsyncHTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, ) + self._proxy_origin = proxy_origin + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._remote_origin = remote_origin + + async def handle_async_request(self, request: Request) -> Response: + headers = merge_headers(self._proxy_headers, request.headers) + url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=bytes(request.url), + ) + proxy_request = Request( + method=request.method, + url=url, + headers=headers, + content=request.stream, + extensions=request.extensions, + ) + return await self._connection.handle_async_request(proxy_request) - return status_code, headers, wrapped_stream, extensions + def can_handle_request(self, origin: 
Origin) -> bool: + return origin == self._remote_origin - async def _tunnel_request( - self, - method: bytes, - url: URL, - headers: Headers, - stream: AsyncByteStream, - extensions: dict, - ) -> Tuple[int, Headers, AsyncByteStream, dict]: - """ - Tunnelled proxy requests require an initial CONNECT request to - establish the connection, and then send regular requests. - """ - timeout = cast(TimeoutDict, extensions.get("timeout", {})) - origin = url_to_origin(url) - connection = await self._get_connection_from_pool(origin) + async def aclose(self) -> None: + await self._connection.aclose() - if connection is None: - scheme, host, port = origin + def info(self) -> str: + return self._connection.info() - # First, create a connection to the proxy server - proxy_connection = AsyncHTTPConnection( - origin=self.proxy_origin, - http2=self._http2, - keepalive_expiry=self._keepalive_expiry, - ssl_context=self._ssl_context, - ) + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() - # Issue a CONNECT request... - - # CONNECT www.example.org:80 HTTP/1.1 - # [proxy-headers] - target = b"%b:%d" % (host, port) - connect_url = self.proxy_origin + (target,) - connect_headers = [(b"Host", target), (b"Accept", b"*/*")] - connect_headers = merge_headers(connect_headers, self.proxy_headers) - - try: - ( - proxy_status_code, - _, - proxy_stream, - _, - ) = await proxy_connection.handle_async_request( - b"CONNECT", - connect_url, + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + +class AsyncTunnelHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + ssl_context: Optional[ssl.SSLContext] = None, + proxy_ssl_context: Optional[ssl.SSLContext] = None, + proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + self._connection: AsyncConnectionInterface = AsyncHTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._connect_lock = AsyncLock() + self._connected = False + + async def handle_async_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + async with self._connect_lock: + if not self._connected: + target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) + + connect_url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=target, + ) + connect_headers = merge_headers( + [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers + ) + connect_request = Request( + method=b"CONNECT", + url=connect_url, headers=connect_headers, - stream=ByteStream(b""), 
- extensions=extensions, + extensions=request.extensions, ) - - proxy_reason = get_reason_phrase(proxy_status_code) - logger.trace( - "tunnel_response proxy_status_code=%r proxy_reason=%r ", - proxy_status_code, - proxy_reason, + connect_response = await self._connection.handle_async_request( + connect_request ) - # Read the response data without closing the socket - async for _ in proxy_stream: - pass - # See if the tunnel was successfully established. - if proxy_status_code < 200 or proxy_status_code > 299: - msg = "%d %s" % (proxy_status_code, proxy_reason) + if connect_response.status < 200 or connect_response.status > 299: + reason_bytes = connect_response.extensions.get("reason_phrase", b"") + reason_str = reason_bytes.decode("ascii", errors="ignore") + msg = "%d %s" % (connect_response.status, reason_str) + await self._connection.aclose() raise ProxyError(msg) - # Upgrade to TLS if required - # We assume the target speaks TLS on the specified port - if scheme == b"https": - await proxy_connection.start_tls(host, self._ssl_context, timeout) - except Exception as exc: - await proxy_connection.aclose() - raise ProxyError(exc) - - # The CONNECT request is successful, so we have now SWITCHED PROTOCOLS. - # This means the proxy connection is now unusable, and we must create - # a new one for regular requests, making sure to use the same socket to - # retain the tunnel. - connection = AsyncHTTPConnection( - origin=origin, - http2=self._http2, - keepalive_expiry=self._keepalive_expiry, - ssl_context=self._ssl_context, - socket=proxy_connection.socket, - ) - await self._add_to_pool(connection, timeout) - - # Once the connection has been established we can send requests on - # it as normal. - ( - status_code, - headers, - stream, - extensions, - ) = await connection.handle_async_request( - method, - url, - headers=headers, - stream=stream, - extensions=extensions, - ) + stream = connect_response.extensions["network_stream"] - wrapped_stream = ResponseByteStream( - stream, connection=connection, callback=self._response_closed - ) + # Upgrade the stream to SSL + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + + self._connected = True + return await self._connection.handle_async_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + await self._connection.aclose() + + def info(self) -> str: + return self._connection.info() + + 
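# A minimal sketch of the ALPN check used in `handle_async_request` above to
# choose between HTTP/1.1 and HTTP/2 once the tunnel is upgraded to TLS
# (the helper name is illustrative):
import ssl
import typing


def negotiated_h2(ssl_object: typing.Optional[ssl.SSLObject]) -> bool:
    # ALPN yields "h2" only when both sides agreed to speak HTTP/2.
    return ssl_object is not None and ssl_object.selected_alpn_protocol() == "h2"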
def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() - return status_code, headers, wrapped_stream, extensions + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/packages/httpcore/_async/interfaces.py b/packages/httpcore/_async/interfaces.py new file mode 100644 index 000000000..c998dd276 --- /dev/null +++ b/packages/httpcore/_async/interfaces.py @@ -0,0 +1,135 @@ +from contextlib import asynccontextmanager +from typing import AsyncIterator, Optional, Union + +from .._models import ( + URL, + Extensions, + HeaderTypes, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, + include_request_headers, +) + + +class AsyncRequestInterface: + async def request( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, AsyncIterator[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> Response: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = await self.handle_async_request(request) + try: + await response.aread() + finally: + await response.aclose() + return response + + @asynccontextmanager + async def stream( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, AsyncIterator[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> AsyncIterator[Response]: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = await self.handle_async_request(request) + try: + yield response + finally: + await response.aclose() + + async def handle_async_request(self, request: Request) -> Response: + raise NotImplementedError() # pragma: nocover + + +class AsyncConnectionInterface(AsyncRequestInterface): + async def aclose(self) -> None: + raise NotImplementedError() # pragma: nocover + + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def can_handle_request(self, origin: Origin) -> bool: + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an + outgoing request. + + An HTTP/1.1 connection will only be available if it is currently idle. + + An HTTP/2 connection will be available so long as the stream ID space is + not yet exhausted, and the connection is not in an error state. 
+
+        While the connection is being established we may not yet know if it is going
+        to result in an HTTP/1.1 or HTTP/2 connection. The connection should be
+        treated as being available, but might ultimately raise `NewConnectionRequired`
+        exceptions if multiple requests are attempted over a connection
+        that ends up being established as HTTP/1.1.
+        """
+        raise NotImplementedError()  # pragma: nocover
+
+    def has_expired(self) -> bool:
+        """
+        Return `True` if the connection is in a state where it should be closed.
+
+        This either means that the connection is idle and it has passed the
+        expiry time on its keep-alive, or that the server has sent an EOF.
+        """
+        raise NotImplementedError()  # pragma: nocover
+
+    def is_idle(self) -> bool:
+        """
+        Return `True` if the connection is currently idle.
+        """
+        raise NotImplementedError()  # pragma: nocover
+
+    def is_closed(self) -> bool:
+        """
+        Return `True` if the connection has been closed.
+
+        Used when a response is closed to determine if the connection may be
+        returned to the connection pool or not.
+        """
+        raise NotImplementedError()  # pragma: nocover
diff --git a/packages/httpcore/_async/socks_proxy.py b/packages/httpcore/_async/socks_proxy.py
new file mode 100644
index 000000000..08a065d6d
--- /dev/null
+++ b/packages/httpcore/_async/socks_proxy.py
@@ -0,0 +1,342 @@
+import logging
+import ssl
+import typing
+
+from socksio import socks5
+
+from .._backends.auto import AutoBackend
+from .._backends.base import AsyncNetworkBackend, AsyncNetworkStream
+from .._exceptions import ConnectionNotAvailable, ProxyError
+from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url
+from .._ssl import default_ssl_context
+from .._synchronization import AsyncLock
+from .._trace import Trace
+from .connection_pool import AsyncConnectionPool
+from .http11 import AsyncHTTP11Connection
+from .interfaces import AsyncConnectionInterface
+
+logger = logging.getLogger("httpcore.socks")
+
+
+AUTH_METHODS = {
+    b"\x00": "NO AUTHENTICATION REQUIRED",
+    b"\x01": "GSSAPI",
+    b"\x02": "USERNAME/PASSWORD",
+    b"\xff": "NO ACCEPTABLE METHODS",
+}
+
+REPLY_CODES = {
+    b"\x00": "Succeeded",
+    b"\x01": "General SOCKS server failure",
+    b"\x02": "Connection not allowed by ruleset",
+    b"\x03": "Network unreachable",
+    b"\x04": "Host unreachable",
+    b"\x05": "Connection refused",
+    b"\x06": "TTL expired",
+    b"\x07": "Command not supported",
+    b"\x08": "Address type not supported",
+}
+
+
+async def _init_socks5_connection(
+    stream: AsyncNetworkStream,
+    *,
+    host: bytes,
+    port: int,
+    auth: typing.Optional[typing.Tuple[bytes, bytes]] = None,
+) -> None:
+    conn = socks5.SOCKS5Connection()
+
+    # Auth method request
+    auth_method = (
+        socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED
+        if auth is None
+        else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD
+    )
+    conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method]))
+    outgoing_bytes = conn.data_to_send()
+    await stream.write(outgoing_bytes)
+
+    # Auth method response
+    incoming_bytes = await stream.read(max_bytes=4096)
+    response = conn.receive_data(incoming_bytes)
+    assert isinstance(response, socks5.SOCKS5AuthReply)
+    if response.method != auth_method:
+        requested = AUTH_METHODS.get(auth_method, "UNKNOWN")
+        responded = AUTH_METHODS.get(response.method, "UNKNOWN")
+        raise ProxyError(
+            f"Requested {requested} from proxy server, but got {responded}."
+        )
+
+    if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
+        # Username/password request
+        assert auth is not None
+        username, password = auth
+        conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password))
+        outgoing_bytes = conn.data_to_send()
+        await stream.write(outgoing_bytes)
+
+        # Username/password response
+        incoming_bytes = await stream.read(max_bytes=4096)
+        response = conn.receive_data(incoming_bytes)
+        assert isinstance(response, socks5.SOCKS5UsernamePasswordReply)
+        if not response.success:
+            raise ProxyError("Invalid username/password")
+
+    # Connect request
+    conn.send(
+        socks5.SOCKS5CommandRequest.from_address(
+            socks5.SOCKS5Command.CONNECT, (host, port)
+        )
+    )
+    outgoing_bytes = conn.data_to_send()
+    await stream.write(outgoing_bytes)
+
+    # Connect response
+    incoming_bytes = await stream.read(max_bytes=4096)
+    response = conn.receive_data(incoming_bytes)
+    assert isinstance(response, socks5.SOCKS5Reply)
+    if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED:
+        reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN")
+        raise ProxyError(f"Proxy Server could not connect: {reply_code}.")
+
+
+class AsyncSOCKSProxy(AsyncConnectionPool):
+    """
+    A connection pool that sends requests via a SOCKS5 proxy.
+    """
+
+    def __init__(
+        self,
+        proxy_url: typing.Union[URL, bytes, str],
+        proxy_auth: typing.Optional[
+            typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]]
+        ] = None,
+        ssl_context: typing.Optional[ssl.SSLContext] = None,
+        max_connections: typing.Optional[int] = 10,
+        max_keepalive_connections: typing.Optional[int] = None,
+        keepalive_expiry: typing.Optional[float] = None,
+        http1: bool = True,
+        http2: bool = False,
+        retries: int = 0,
+        network_backend: typing.Optional[AsyncNetworkBackend] = None,
+    ) -> None:
+        """
+        A connection pool for making HTTP requests.
+
+        Parameters:
+            proxy_url: The URL to use when connecting to the proxy server.
+                For example `"http://127.0.0.1:8080/"`.
+            ssl_context: An SSL context to use for verifying connections.
+                If not specified, the default `httpcore.default_ssl_context()`
+                will be used.
+            max_connections: The maximum number of concurrent HTTP connections that
+                the pool should allow. Any attempt to send a request on a pool that
+                would exceed this amount will block until a connection is available.
+            max_keepalive_connections: The maximum number of idle HTTP connections
+                that will be maintained in the pool.
+            keepalive_expiry: The duration in seconds that an idle HTTP connection
+                may be maintained for before being expired from the pool.
+            http1: A boolean indicating if HTTP/1.1 requests should be supported
+                by the connection pool. Defaults to True.
+            http2: A boolean indicating if HTTP/2 requests should be supported by
+                the connection pool. Defaults to False.
+            retries: The maximum number of retries when trying to establish
+                a connection.
+            network_backend: A backend instance to use for handling network I/O.
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if proxy_auth is not None: + username, password = proxy_auth + username_bytes = enforce_bytes(username, name="proxy_auth") + password_bytes = enforce_bytes(password, name="proxy_auth") + self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = ( + username_bytes, + password_bytes, + ) + else: + self._proxy_auth = None + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + return AsyncSocks5Connection( + proxy_origin=self._proxy_url.origin, + remote_origin=origin, + proxy_auth=self._proxy_auth, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class AsyncSocks5Connection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, + ssl_context: typing.Optional[ssl.SSLContext] = None, + keepalive_expiry: typing.Optional[float] = None, + http1: bool = True, + http2: bool = False, + network_backend: typing.Optional[AsyncNetworkBackend] = None, + ) -> None: + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._proxy_auth = proxy_auth + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + + self._network_backend: AsyncNetworkBackend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._connect_lock = AsyncLock() + self._connection: typing.Optional[AsyncConnectionInterface] = None + self._connect_failed = False + + async def handle_async_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + async with self._connect_lock: + if self._connection is None: + try: + # Connect to the proxy + kwargs = { + "host": self._proxy_origin.host.decode("ascii"), + "port": self._proxy_origin.port, + "timeout": timeout, + } + with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = await self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + + # Connect to the remote host using socks5 + kwargs = { + "stream": stream, + "host": self._remote_origin.host.decode("ascii"), + "port": self._remote_origin.port, + "auth": self._proxy_auth, + } + with Trace( + "setup_socks5_connection", logger, request, kwargs + ) as trace: + await _init_socks5_connection(**kwargs) + trace.return_value = stream + + # Upgrade the stream to SSL + if self._remote_origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ( + ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ) + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be 
using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or ( + self._http2 and not self._http1 + ): # pragma: nocover + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except Exception as exc: + self._connect_failed = True + raise exc + elif not self._connection.is_available(): # pragma: nocover + raise ConnectionNotAvailable() + + return await self._connection.handle_async_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + if self._connection is not None: + await self._connection.aclose() + + def is_available(self) -> bool: + if self._connection is None: # pragma: nocover + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._remote_origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: # pragma: nocover + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/packages/httpcore/_backends/anyio.py b/packages/httpcore/_backends/anyio.py index b1332a27f..1ed5228db 100644 --- a/packages/httpcore/_backends/anyio.py +++ b/packages/httpcore/_backends/anyio.py @@ -1,10 +1,7 @@ -from ssl import SSLContext -from typing import Optional +import ssl +import typing -import anyio.abc -from anyio import BrokenResourceError, EndOfStream -from anyio.abc import ByteStream, SocketAttribute -from anyio.streams.tls import TLSAttribute, TLSStream +import anyio from .._exceptions import ( ConnectError, @@ -15,187 +12,134 @@ WriteTimeout, map_exceptions, ) -from .._types import TimeoutDict from .._utils import is_socket_readable -from .base import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream -class SocketStream(AsyncSocketStream): - def __init__(self, stream: ByteStream) -> None: - self.stream = stream - self.read_lock = anyio.Lock() - self.write_lock = anyio.Lock() +class AnyIOStream(AsyncNetworkStream): + def __init__(self, stream: anyio.abc.ByteStream) -> None: + self._stream = stream - def get_http_version(self) -> str: - alpn_protocol = self.stream.extra(TLSAttribute.alpn_protocol, None) - return "HTTP/2" if alpn_protocol == "h2" else "HTTP/1.1" - - async def start_tls( - self, - hostname: 
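# A sketch of the same SOCKS5 exchange that `_init_socks5_connection` performs
# above, shown over a plain blocking socket with no authentication. It uses
# only the `socksio` calls that appear in the new module; the helper name and
# error handling are illustrative.
import socket

from socksio import socks5


def socks5_connect(sock: socket.socket, host: bytes, port: int) -> None:
    conn = socks5.SOCKS5Connection()

    # 1. Offer the "no authentication required" method.
    conn.send(
        socks5.SOCKS5AuthMethodsRequest([socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED])
    )
    sock.sendall(conn.data_to_send())
    reply = conn.receive_data(sock.recv(4096))
    assert isinstance(reply, socks5.SOCKS5AuthReply)

    # 2. Ask the proxy to CONNECT to the remote host.
    conn.send(
        socks5.SOCKS5CommandRequest.from_address(
            socks5.SOCKS5Command.CONNECT, (host, port)
        )
    )
    sock.sendall(conn.data_to_send())
    reply = conn.receive_data(sock.recv(4096))
    assert isinstance(reply, socks5.SOCKS5Reply)
    assert reply.reply_code == socks5.SOCKS5ReplyCode.SUCCEEDED
    # From here the socket carries the tunnelled end-to-end traffic.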
bytes, - ssl_context: SSLContext, - timeout: TimeoutDict, - ) -> "SocketStream": - connect_timeout = timeout.get("connect") - try: - with anyio.fail_after(connect_timeout): - ssl_stream = await TLSStream.wrap( - self.stream, - ssl_context=ssl_context, - hostname=hostname.decode("ascii"), - standard_compatible=False, - ) - except TimeoutError: - raise ConnectTimeout from None - except BrokenResourceError as exc: - raise ConnectError from exc - - return SocketStream(ssl_stream) - - async def read(self, n: int, timeout: TimeoutDict) -> bytes: - read_timeout = timeout.get("read") - async with self.read_lock: - try: - with anyio.fail_after(read_timeout): - return await self.stream.receive(n) - except TimeoutError: - await self.stream.aclose() - raise ReadTimeout from None - except BrokenResourceError as exc: - raise ReadError from exc - except EndOfStream: - return b"" - - async def write(self, data: bytes, timeout: TimeoutDict) -> None: - if not data: + async def read( + self, max_bytes: int, timeout: typing.Optional[float] = None + ) -> bytes: + exc_map = { + TimeoutError: ReadTimeout, + anyio.BrokenResourceError: ReadError, + anyio.ClosedResourceError: ReadError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + try: + return await self._stream.receive(max_bytes=max_bytes) + except anyio.EndOfStream: # pragma: nocover + return b"" + + async def write( + self, buffer: bytes, timeout: typing.Optional[float] = None + ) -> None: + if not buffer: return - write_timeout = timeout.get("write") - async with self.write_lock: - try: - with anyio.fail_after(write_timeout): - return await self.stream.send(data) - except TimeoutError: - await self.stream.aclose() - raise WriteTimeout from None - except BrokenResourceError as exc: - raise WriteError from exc + exc_map = { + TimeoutError: WriteTimeout, + anyio.BrokenResourceError: WriteError, + anyio.ClosedResourceError: WriteError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + await self._stream.send(item=buffer) async def aclose(self) -> None: - async with self.write_lock: - try: - await self.stream.aclose() - except BrokenResourceError: - pass - - def is_readable(self) -> bool: - sock = self.stream.extra(SocketAttribute.raw_socket) - return is_socket_readable(sock) - - -class Lock(AsyncLock): - def __init__(self) -> None: - self._lock = anyio.Lock() - - async def release(self) -> None: - self._lock.release() - - async def acquire(self) -> None: - await self._lock.acquire() - - -class Semaphore(AsyncSemaphore): - def __init__(self, max_value: int, exc_class: type): - self.max_value = max_value - self.exc_class = exc_class - - @property - def semaphore(self) -> anyio.abc.Semaphore: - if not hasattr(self, "_semaphore"): - self._semaphore = anyio.Semaphore(self.max_value) - return self._semaphore - - async def acquire(self, timeout: float = None) -> None: - with anyio.move_on_after(timeout): - await self.semaphore.acquire() - return + await self._stream.aclose() - raise self.exc_class() - - async def release(self) -> None: - self.semaphore.release() - - -class AnyIOBackend(AsyncBackend): - async def open_tcp_stream( + async def start_tls( self, - hostname: bytes, - port: int, - ssl_context: Optional[SSLContext], - timeout: TimeoutDict, - *, - local_address: Optional[str], - ) -> AsyncSocketStream: - connect_timeout = timeout.get("connect") - unicode_host = hostname.decode("utf-8") + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> 
AsyncNetworkStream: exc_map = { TimeoutError: ConnectTimeout, - OSError: ConnectError, - BrokenResourceError: ConnectError, + anyio.BrokenResourceError: ConnectError, } - with map_exceptions(exc_map): - with anyio.fail_after(connect_timeout): - stream: anyio.abc.ByteStream - stream = await anyio.connect_tcp( - unicode_host, port, local_host=local_address - ) - if ssl_context: - stream = await TLSStream.wrap( - stream, - hostname=unicode_host, + try: + with anyio.fail_after(timeout): + ssl_stream = await anyio.streams.tls.TLSStream.wrap( + self._stream, ssl_context=ssl_context, + hostname=server_hostname, standard_compatible=False, + server_side=False, ) + except Exception as exc: # pragma: nocover + await self.aclose() + raise exc + return AnyIOStream(ssl_stream) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object": + return self._stream.extra(anyio.streams.tls.TLSAttribute.ssl_object, None) + if info == "client_addr": + return self._stream.extra(anyio.abc.SocketAttribute.local_address, None) + if info == "server_addr": + return self._stream.extra(anyio.abc.SocketAttribute.remote_address, None) + if info == "socket": + return self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) + if info == "is_readable": + sock = self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) + return is_socket_readable(sock) + return None + + +class AnyIOBackend(AsyncNetworkBackend): + async def connect_tcp( + self, + host: str, + port: int, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + if socket_options is None: + socket_options = [] # pragma: no cover + exc_map = { + TimeoutError: ConnectTimeout, + OSError: ConnectError, + anyio.BrokenResourceError: ConnectError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + stream: anyio.abc.ByteStream = await anyio.connect_tcp( + remote_host=host, + remote_port=port, + local_host=local_address, + ) + # By default TCP sockets opened in `asyncio` include TCP_NODELAY. 
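# `map_exceptions` is an httpcore internal used throughout this backend.
# Roughly, it is a context manager along these lines (a sketch inferred from
# the usage above, not the exact httpcore implementation):
import contextlib
import typing


@contextlib.contextmanager
def map_exceptions(
    exc_map: typing.Mapping[typing.Type[Exception], typing.Type[Exception]]
) -> typing.Iterator[None]:
    try:
        yield
    except Exception as exc:
        for from_exc, to_exc in exc_map.items():
            if isinstance(exc, from_exc):
                # Translate low-level errors into httpcore's exception types,
                # chaining the original exception for debugging.
                raise to_exc(exc) from exc
        raise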
+ for option in socket_options: + stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return AnyIOStream(stream) - return SocketStream(stream=stream) - - async def open_uds_stream( + async def connect_unix_socket( self, path: str, - hostname: bytes, - ssl_context: Optional[SSLContext], - timeout: TimeoutDict, - ) -> AsyncSocketStream: - connect_timeout = timeout.get("connect") - unicode_host = hostname.decode("utf-8") + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] exc_map = { TimeoutError: ConnectTimeout, OSError: ConnectError, - BrokenResourceError: ConnectError, + anyio.BrokenResourceError: ConnectError, } - with map_exceptions(exc_map): - with anyio.fail_after(connect_timeout): + with anyio.fail_after(timeout): stream: anyio.abc.ByteStream = await anyio.connect_unix(path) - if ssl_context: - stream = await TLSStream.wrap( - stream, - hostname=unicode_host, - ssl_context=ssl_context, - standard_compatible=False, - ) - - return SocketStream(stream=stream) - - def create_lock(self) -> AsyncLock: - return Lock() - - def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore: - return Semaphore(max_value, exc_class=exc_class) - - async def time(self) -> float: - return float(anyio.current_time()) + for option in socket_options: + stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return AnyIOStream(stream) async def sleep(self, seconds: float) -> None: - await anyio.sleep(seconds) + await anyio.sleep(seconds) # pragma: nocover diff --git a/packages/httpcore/_backends/asyncio.py b/packages/httpcore/_backends/asyncio.py deleted file mode 100644 index 5142072e0..000000000 --- a/packages/httpcore/_backends/asyncio.py +++ /dev/null @@ -1,303 +0,0 @@ -import asyncio -import socket -from ssl import SSLContext -from typing import Optional - -from .._exceptions import ( - ConnectError, - ConnectTimeout, - ReadError, - ReadTimeout, - WriteError, - WriteTimeout, - map_exceptions, -) -from .._types import TimeoutDict -from .._utils import is_socket_readable -from .base import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream - -SSL_MONKEY_PATCH_APPLIED = False - - -def ssl_monkey_patch() -> None: - """ - Monkey-patch for https://bugs.python.org/issue36709 - - This prevents console errors when outstanding HTTPS connections - still exist at the point of exiting. - - Clients which have been opened using a `with` block, or which have - had `close()` closed, will not exhibit this issue in the first place. - """ - MonkeyPatch = asyncio.selector_events._SelectorSocketTransport # type: ignore - - _write = MonkeyPatch.write - - def _fixed_write(self, data: bytes) -> None: # type: ignore - if self._loop and not self._loop.is_closed(): - _write(self, data) - - MonkeyPatch.write = _fixed_write - - -async def backport_start_tls( - transport: asyncio.BaseTransport, - protocol: asyncio.BaseProtocol, - ssl_context: SSLContext, - *, - server_side: bool = False, - server_hostname: str = None, - ssl_handshake_timeout: float = None, -) -> asyncio.Transport: # pragma: nocover (Since it's not used on all Python versions.) - """ - Python 3.6 asyncio doesn't have a start_tls() method on the loop - so we use this function in place of the loop's start_tls() method. 
- Adapted from this comment: - https://github.com/urllib3/urllib3/issues/1323#issuecomment-362494839 - """ - import asyncio.sslproto - - loop = asyncio.get_event_loop() - waiter = loop.create_future() - ssl_protocol = asyncio.sslproto.SSLProtocol( - loop, - protocol, - ssl_context, - waiter, - server_side=False, - server_hostname=server_hostname, - call_connection_made=False, - ) - - transport.set_protocol(ssl_protocol) - loop.call_soon(ssl_protocol.connection_made, transport) - loop.call_soon(transport.resume_reading) # type: ignore - - await waiter - return ssl_protocol._app_transport - - -class SocketStream(AsyncSocketStream): - def __init__( - self, stream_reader: asyncio.StreamReader, stream_writer: asyncio.StreamWriter - ): - self.stream_reader = stream_reader - self.stream_writer = stream_writer - self.read_lock = asyncio.Lock() - self.write_lock = asyncio.Lock() - - def get_http_version(self) -> str: - ssl_object = self.stream_writer.get_extra_info("ssl_object") - - if ssl_object is None: - return "HTTP/1.1" - - ident = ssl_object.selected_alpn_protocol() - return "HTTP/2" if ident == "h2" else "HTTP/1.1" - - async def start_tls( - self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict - ) -> "SocketStream": - loop = asyncio.get_event_loop() - - stream_reader = asyncio.StreamReader() - protocol = asyncio.StreamReaderProtocol(stream_reader) - transport = self.stream_writer.transport - - loop_start_tls = getattr(loop, "start_tls", backport_start_tls) - - exc_map = {asyncio.TimeoutError: ConnectTimeout, OSError: ConnectError} - - with map_exceptions(exc_map): - transport = await asyncio.wait_for( - loop_start_tls( - transport, - protocol, - ssl_context, - server_hostname=hostname.decode("ascii"), - ), - timeout=timeout.get("connect"), - ) - - # Initialize the protocol, so it is made aware of being tied to - # a TLS connection. - # See: https://github.com/encode/httpx/issues/859 - protocol.connection_made(transport) - - stream_writer = asyncio.StreamWriter( - transport=transport, protocol=protocol, reader=stream_reader, loop=loop - ) - - ssl_stream = SocketStream(stream_reader, stream_writer) - # When we return a new SocketStream with new StreamReader/StreamWriter instances - # we need to keep references to the old StreamReader/StreamWriter so that they - # are not garbage collected and closed while we're still using them. - ssl_stream._inner = self # type: ignore - return ssl_stream - - async def read(self, n: int, timeout: TimeoutDict) -> bytes: - exc_map = {asyncio.TimeoutError: ReadTimeout, OSError: ReadError} - async with self.read_lock: - with map_exceptions(exc_map): - try: - return await asyncio.wait_for( - self.stream_reader.read(n), timeout.get("read") - ) - except AttributeError as exc: # pragma: nocover - if "resume_reading" in str(exc): - # Python's asyncio has a bug that can occur when a - # connection has been closed, while it is paused. - # See: https://github.com/encode/httpx/issues/1213 - # - # Returning an empty byte-string to indicate connection - # close will eventually raise an httpcore.RemoteProtocolError - # to the user when this goes through our HTTP parsing layer. 
- return b"" - raise - - async def write(self, data: bytes, timeout: TimeoutDict) -> None: - if not data: - return - - exc_map = {asyncio.TimeoutError: WriteTimeout, OSError: WriteError} - async with self.write_lock: - with map_exceptions(exc_map): - self.stream_writer.write(data) - return await asyncio.wait_for( - self.stream_writer.drain(), timeout.get("write") - ) - - async def aclose(self) -> None: - # SSL connections should issue the close and then abort, rather than - # waiting for the remote end of the connection to signal the EOF. - # - # See: - # - # * https://bugs.python.org/issue39758 - # * https://github.com/python-trio/trio/blob/ - # 31e2ae866ad549f1927d45ce073d4f0ea9f12419/trio/_ssl.py#L779-L829 - # - # And related issues caused if we simply omit the 'wait_closed' call, - # without first using `.abort()` - # - # * https://github.com/encode/httpx/issues/825 - # * https://github.com/encode/httpx/issues/914 - is_ssl = self.stream_writer.get_extra_info("ssl_object") is not None - - async with self.write_lock: - try: - self.stream_writer.close() - if is_ssl: - # Give the connection a chance to write any data in the buffer, - # and then forcibly tear down the SSL connection. - await asyncio.sleep(0) - self.stream_writer.transport.abort() # type: ignore - if hasattr(self.stream_writer, "wait_closed"): - # Python 3.7+ only. - await self.stream_writer.wait_closed() # type: ignore - except OSError: - pass - - def is_readable(self) -> bool: - transport = self.stream_reader._transport # type: ignore - sock: Optional[socket.socket] = transport.get_extra_info("socket") - return is_socket_readable(sock) - - -class Lock(AsyncLock): - def __init__(self) -> None: - self._lock = asyncio.Lock() - - async def release(self) -> None: - self._lock.release() - - async def acquire(self) -> None: - await self._lock.acquire() - - -class Semaphore(AsyncSemaphore): - def __init__(self, max_value: int, exc_class: type) -> None: - self.max_value = max_value - self.exc_class = exc_class - - @property - def semaphore(self) -> asyncio.BoundedSemaphore: - if not hasattr(self, "_semaphore"): - self._semaphore = asyncio.BoundedSemaphore(value=self.max_value) - return self._semaphore - - async def acquire(self, timeout: float = None) -> None: - try: - await asyncio.wait_for(self.semaphore.acquire(), timeout) - except asyncio.TimeoutError: - raise self.exc_class() - - async def release(self) -> None: - self.semaphore.release() - - -class AsyncioBackend(AsyncBackend): - def __init__(self) -> None: - global SSL_MONKEY_PATCH_APPLIED - - if not SSL_MONKEY_PATCH_APPLIED: - ssl_monkey_patch() - SSL_MONKEY_PATCH_APPLIED = True - - async def open_tcp_stream( - self, - hostname: bytes, - port: int, - ssl_context: Optional[SSLContext], - timeout: TimeoutDict, - *, - local_address: Optional[str], - ) -> SocketStream: - host = hostname.decode("ascii") - connect_timeout = timeout.get("connect") - local_addr = None if local_address is None else (local_address, 0) - - exc_map = {asyncio.TimeoutError: ConnectTimeout, OSError: ConnectError} - with map_exceptions(exc_map): - stream_reader, stream_writer = await asyncio.wait_for( - asyncio.open_connection( - host, port, ssl=ssl_context, local_addr=local_addr - ), - connect_timeout, - ) - return SocketStream( - stream_reader=stream_reader, stream_writer=stream_writer - ) - - async def open_uds_stream( - self, - path: str, - hostname: bytes, - ssl_context: Optional[SSLContext], - timeout: TimeoutDict, - ) -> AsyncSocketStream: - host = hostname.decode("ascii") - connect_timeout = 
timeout.get("connect") - kwargs: dict = {"server_hostname": host} if ssl_context is not None else {} - exc_map = {asyncio.TimeoutError: ConnectTimeout, OSError: ConnectError} - with map_exceptions(exc_map): - stream_reader, stream_writer = await asyncio.wait_for( - asyncio.open_unix_connection(path, ssl=ssl_context, **kwargs), - connect_timeout, - ) - return SocketStream( - stream_reader=stream_reader, stream_writer=stream_writer - ) - - def create_lock(self) -> AsyncLock: - return Lock() - - def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore: - return Semaphore(max_value, exc_class=exc_class) - - async def time(self) -> float: - loop = asyncio.get_event_loop() - return loop.time() - - async def sleep(self, seconds: float) -> None: - await asyncio.sleep(seconds) diff --git a/packages/httpcore/_backends/auto.py b/packages/httpcore/_backends/auto.py index 5579ab467..b612ba071 100644 --- a/packages/httpcore/_backends/auto.py +++ b/packages/httpcore/_backends/auto.py @@ -1,67 +1,52 @@ -from ssl import SSLContext +import typing from typing import Optional import sniffio -from .._types import TimeoutDict -from .base import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream -# The following line is imported from the _sync modules -from .sync import SyncBackend, SyncLock, SyncSemaphore, SyncSocketStream # noqa - -class AutoBackend(AsyncBackend): - @property - def backend(self) -> AsyncBackend: - if not hasattr(self, "_backend_implementation"): +class AutoBackend(AsyncNetworkBackend): + async def _init_backend(self) -> None: + if not (hasattr(self, "_backend")): backend = sniffio.current_async_library() - - if backend == "asyncio": - from .anyio import AnyIOBackend - - self._backend_implementation: AsyncBackend = AnyIOBackend() - elif backend == "trio": + if backend == "trio": from .trio import TrioBackend - self._backend_implementation = TrioBackend() - elif backend == "curio": - from .curio import CurioBackend + self._backend: AsyncNetworkBackend = TrioBackend() + else: + from .anyio import AnyIOBackend - self._backend_implementation = CurioBackend() - else: # pragma: nocover - raise RuntimeError(f"Unsupported concurrency backend {backend!r}") - return self._backend_implementation + self._backend = AnyIOBackend() - async def open_tcp_stream( + async def connect_tcp( self, - hostname: bytes, + host: str, port: int, - ssl_context: Optional[SSLContext], - timeout: TimeoutDict, - *, - local_address: Optional[str], - ) -> AsyncSocketStream: - return await self.backend.open_tcp_stream( - hostname, port, ssl_context, timeout, local_address=local_address + timeout: Optional[float] = None, + local_address: Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + await self._init_backend() + return await self._backend.connect_tcp( + host, + port, + timeout=timeout, + local_address=local_address, + socket_options=socket_options, ) - async def open_uds_stream( + async def connect_unix_socket( self, path: str, - hostname: bytes, - ssl_context: Optional[SSLContext], - timeout: TimeoutDict, - ) -> AsyncSocketStream: - return await self.backend.open_uds_stream(path, hostname, ssl_context, timeout) - - def create_lock(self) -> AsyncLock: - return self.backend.create_lock() - - def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore: - return self.backend.create_semaphore(max_value, exc_class=exc_class) - - async def 
time(self) -> float: - return await self.backend.time() + timeout: Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: # pragma: nocover + await self._init_backend() + return await self._backend.connect_unix_socket( + path, timeout=timeout, socket_options=socket_options + ) - async def sleep(self, seconds: float) -> None: - await self.backend.sleep(seconds) + async def sleep(self, seconds: float) -> None: # pragma: nocover + await self._init_backend() + return await self._backend.sleep(seconds) diff --git a/packages/httpcore/_backends/base.py b/packages/httpcore/_backends/base.py index 1ca6e31b5..6cadedb5f 100644 --- a/packages/httpcore/_backends/base.py +++ b/packages/httpcore/_backends/base.py @@ -1,137 +1,103 @@ -from ssl import SSLContext -from types import TracebackType -from typing import TYPE_CHECKING, Optional, Type +import ssl +import time +import typing -from .._types import TimeoutDict +SOCKET_OPTION = typing.Union[ + typing.Tuple[int, int, int], + typing.Tuple[int, int, typing.Union[bytes, bytearray]], + typing.Tuple[int, int, None, int], +] -if TYPE_CHECKING: # pragma: no cover - from .sync import SyncBackend +class NetworkStream: + def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: + raise NotImplementedError() # pragma: nocover -def lookup_async_backend(name: str) -> "AsyncBackend": - if name == "auto": - from .auto import AutoBackend + def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: + raise NotImplementedError() # pragma: nocover - return AutoBackend() - elif name == "asyncio": - from .asyncio import AsyncioBackend - - return AsyncioBackend() - elif name == "trio": - from .trio import TrioBackend - - return TrioBackend() - elif name == "curio": - from .curio import CurioBackend - - return CurioBackend() - elif name == "anyio": - from .anyio import AnyIOBackend - - return AnyIOBackend() - - raise ValueError("Invalid backend name {name!r}") + def close(self) -> None: + raise NotImplementedError() # pragma: nocover + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> "NetworkStream": + raise NotImplementedError() # pragma: nocover -def lookup_sync_backend(name: str) -> "SyncBackend": - from .sync import SyncBackend + def get_extra_info(self, info: str) -> typing.Any: + return None # pragma: nocover - return SyncBackend() +class NetworkBackend: + def connect_tcp( + self, + host: str, + port: int, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover -class AsyncSocketStream: - """ - A socket stream with read/write operations. Abstracts away any asyncio-specific - interfaces into a more generic base class, that we can use with alternate - backends, or for stand-alone test cases. 
- """ + def connect_unix_socket( + self, + path: str, + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover - def get_http_version(self) -> str: - raise NotImplementedError() # pragma: no cover + def sleep(self, seconds: float) -> None: + time.sleep(seconds) # pragma: nocover - async def start_tls( - self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict - ) -> "AsyncSocketStream": - raise NotImplementedError() # pragma: no cover - async def read(self, n: int, timeout: TimeoutDict) -> bytes: - raise NotImplementedError() # pragma: no cover +class AsyncNetworkStream: + async def read( + self, max_bytes: int, timeout: typing.Optional[float] = None + ) -> bytes: + raise NotImplementedError() # pragma: nocover - async def write(self, data: bytes, timeout: TimeoutDict) -> None: - raise NotImplementedError() # pragma: no cover + async def write( + self, buffer: bytes, timeout: typing.Optional[float] = None + ) -> None: + raise NotImplementedError() # pragma: nocover async def aclose(self) -> None: - raise NotImplementedError() # pragma: no cover - - def is_readable(self) -> bool: - raise NotImplementedError() # pragma: no cover - + raise NotImplementedError() # pragma: nocover -class AsyncLock: - """ - An abstract interface for Lock classes. - """ - - async def __aenter__(self) -> None: - await self.acquire() - - async def __aexit__( + async def start_tls( self, - exc_type: Type[BaseException] = None, - exc_value: BaseException = None, - traceback: TracebackType = None, - ) -> None: - await self.release() - - async def release(self) -> None: - raise NotImplementedError() # pragma: no cover - - async def acquire(self) -> None: - raise NotImplementedError() # pragma: no cover - + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> "AsyncNetworkStream": + raise NotImplementedError() # pragma: nocover -class AsyncSemaphore: - """ - An abstract interface for Semaphore classes. - Abstracts away any asyncio-specific interfaces. 
- """ + def get_extra_info(self, info: str) -> typing.Any: + return None # pragma: nocover - async def acquire(self, timeout: float = None) -> None: - raise NotImplementedError() # pragma: no cover - async def release(self) -> None: - raise NotImplementedError() # pragma: no cover - - -class AsyncBackend: - async def open_tcp_stream( +class AsyncNetworkBackend: + async def connect_tcp( self, - hostname: bytes, + host: str, port: int, - ssl_context: Optional[SSLContext], - timeout: TimeoutDict, - *, - local_address: Optional[str], - ) -> AsyncSocketStream: - raise NotImplementedError() # pragma: no cover - - async def open_uds_stream( + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + async def connect_unix_socket( self, path: str, - hostname: bytes, - ssl_context: Optional[SSLContext], - timeout: TimeoutDict, - ) -> AsyncSocketStream: - raise NotImplementedError() # pragma: no cover - - def create_lock(self) -> AsyncLock: - raise NotImplementedError() # pragma: no cover - - def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore: - raise NotImplementedError() # pragma: no cover - - async def time(self) -> float: - raise NotImplementedError() # pragma: no cover + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover async def sleep(self, seconds: float) -> None: - raise NotImplementedError() # pragma: no cover + raise NotImplementedError() # pragma: nocover diff --git a/packages/httpcore/_backends/curio.py b/packages/httpcore/_backends/curio.py deleted file mode 100644 index 99a7b2cc8..000000000 --- a/packages/httpcore/_backends/curio.py +++ /dev/null @@ -1,206 +0,0 @@ -from ssl import SSLContext, SSLSocket -from typing import Optional - -import curio -import curio.io - -from .._exceptions import ( - ConnectError, - ConnectTimeout, - ReadError, - ReadTimeout, - WriteError, - WriteTimeout, - map_exceptions, -) -from .._types import TimeoutDict -from .._utils import get_logger, is_socket_readable -from .base import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream - -logger = get_logger(__name__) - -ONE_DAY_IN_SECONDS = float(60 * 60 * 24) - - -def convert_timeout(value: Optional[float]) -> float: - return value if value is not None else ONE_DAY_IN_SECONDS - - -class Lock(AsyncLock): - def __init__(self) -> None: - self._lock = curio.Lock() - - async def acquire(self) -> None: - await self._lock.acquire() - - async def release(self) -> None: - await self._lock.release() - - -class Semaphore(AsyncSemaphore): - def __init__(self, max_value: int, exc_class: type) -> None: - self.max_value = max_value - self.exc_class = exc_class - - @property - def semaphore(self) -> curio.Semaphore: - if not hasattr(self, "_semaphore"): - self._semaphore = curio.Semaphore(value=self.max_value) - return self._semaphore - - async def acquire(self, timeout: float = None) -> None: - timeout = convert_timeout(timeout) - - try: - return await curio.timeout_after(timeout, self.semaphore.acquire()) - except curio.TaskTimeout: - raise self.exc_class() - - async def release(self) -> None: - await self.semaphore.release() - - -class SocketStream(AsyncSocketStream): - def __init__(self, socket: curio.io.Socket) -> None: - self.read_lock = curio.Lock() - self.write_lock = 
curio.Lock() - self.socket = socket - self.stream = socket.as_stream() - - def get_http_version(self) -> str: - if hasattr(self.socket, "_socket"): - raw_socket = self.socket._socket - - if isinstance(raw_socket, SSLSocket): - ident = raw_socket.selected_alpn_protocol() - return "HTTP/2" if ident == "h2" else "HTTP/1.1" - - return "HTTP/1.1" - - async def start_tls( - self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict - ) -> "AsyncSocketStream": - connect_timeout = convert_timeout(timeout.get("connect")) - exc_map = { - curio.TaskTimeout: ConnectTimeout, - curio.CurioError: ConnectError, - OSError: ConnectError, - } - - with map_exceptions(exc_map): - wrapped_sock = curio.io.Socket( - ssl_context.wrap_socket( - self.socket._socket, - do_handshake_on_connect=False, - server_hostname=hostname.decode("ascii"), - ) - ) - - await curio.timeout_after( - connect_timeout, - wrapped_sock.do_handshake(), - ) - - return SocketStream(wrapped_sock) - - async def read(self, n: int, timeout: TimeoutDict) -> bytes: - read_timeout = convert_timeout(timeout.get("read")) - exc_map = { - curio.TaskTimeout: ReadTimeout, - curio.CurioError: ReadError, - OSError: ReadError, - } - - with map_exceptions(exc_map): - async with self.read_lock: - return await curio.timeout_after(read_timeout, self.stream.read(n)) - - async def write(self, data: bytes, timeout: TimeoutDict) -> None: - write_timeout = convert_timeout(timeout.get("write")) - exc_map = { - curio.TaskTimeout: WriteTimeout, - curio.CurioError: WriteError, - OSError: WriteError, - } - - with map_exceptions(exc_map): - async with self.write_lock: - await curio.timeout_after(write_timeout, self.stream.write(data)) - - async def aclose(self) -> None: - await self.stream.close() - await self.socket.close() - - def is_readable(self) -> bool: - return is_socket_readable(self.socket) - - -class CurioBackend(AsyncBackend): - async def open_tcp_stream( - self, - hostname: bytes, - port: int, - ssl_context: Optional[SSLContext], - timeout: TimeoutDict, - *, - local_address: Optional[str], - ) -> AsyncSocketStream: - connect_timeout = convert_timeout(timeout.get("connect")) - exc_map = { - curio.TaskTimeout: ConnectTimeout, - curio.CurioError: ConnectError, - OSError: ConnectError, - } - host = hostname.decode("ascii") - - kwargs: dict = {} - if ssl_context is not None: - kwargs["ssl"] = ssl_context - kwargs["server_hostname"] = host - if local_address is not None: - kwargs["source_addr"] = (local_address, 0) - - with map_exceptions(exc_map): - sock: curio.io.Socket = await curio.timeout_after( - connect_timeout, - curio.open_connection(hostname, port, **kwargs), - ) - - return SocketStream(sock) - - async def open_uds_stream( - self, - path: str, - hostname: bytes, - ssl_context: Optional[SSLContext], - timeout: TimeoutDict, - ) -> AsyncSocketStream: - connect_timeout = convert_timeout(timeout.get("connect")) - exc_map = { - curio.TaskTimeout: ConnectTimeout, - curio.CurioError: ConnectError, - OSError: ConnectError, - } - host = hostname.decode("ascii") - kwargs = ( - {} if ssl_context is None else {"ssl": ssl_context, "server_hostname": host} - ) - - with map_exceptions(exc_map): - sock: curio.io.Socket = await curio.timeout_after( - connect_timeout, curio.open_unix_connection(path, **kwargs) - ) - - return SocketStream(sock) - - def create_lock(self) -> AsyncLock: - return Lock() - - def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore: - return Semaphore(max_value, exc_class) - - async def time(self) -> float: - return 
await curio.clock()
-
-    async def sleep(self, seconds: float) -> None:
-        await curio.sleep(seconds)
diff --git a/packages/httpcore/_backends/mock.py b/packages/httpcore/_backends/mock.py
new file mode 100644
index 000000000..f7aefebf5
--- /dev/null
+++ b/packages/httpcore/_backends/mock.py
@@ -0,0 +1,142 @@
+import ssl
+import typing
+from typing import Optional
+
+from .._exceptions import ReadError
+from .base import (
+    SOCKET_OPTION,
+    AsyncNetworkBackend,
+    AsyncNetworkStream,
+    NetworkBackend,
+    NetworkStream,
+)
+
+
+class MockSSLObject:
+    def __init__(self, http2: bool):
+        self._http2 = http2
+
+    def selected_alpn_protocol(self) -> str:
+        return "h2" if self._http2 else "http/1.1"
+
+
+class MockStream(NetworkStream):
+    def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None:
+        self._buffer = buffer
+        self._http2 = http2
+        self._closed = False
+
+    def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes:
+        if self._closed:
+            raise ReadError("Connection closed")
+        if not self._buffer:
+            return b""
+        return self._buffer.pop(0)
+
+    def write(self, buffer: bytes, timeout: Optional[float] = None) -> None:
+        pass
+
+    def close(self) -> None:
+        self._closed = True
+
+    def start_tls(
+        self,
+        ssl_context: ssl.SSLContext,
+        server_hostname: Optional[str] = None,
+        timeout: Optional[float] = None,
+    ) -> NetworkStream:
+        return self
+
+    def get_extra_info(self, info: str) -> typing.Any:
+        return MockSSLObject(http2=self._http2) if info == "ssl_object" else None
+
+    def __repr__(self) -> str:
+        return "<httpcore.MockStream>"
+
+
+class MockBackend(NetworkBackend):
+    def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None:
+        self._buffer = buffer
+        self._http2 = http2
+
+    def connect_tcp(
+        self,
+        host: str,
+        port: int,
+        timeout: Optional[float] = None,
+        local_address: Optional[str] = None,
+        socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+    ) -> NetworkStream:
+        return MockStream(list(self._buffer), http2=self._http2)
+
+    def connect_unix_socket(
+        self,
+        path: str,
+        timeout: Optional[float] = None,
+        socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+    ) -> NetworkStream:
+        return MockStream(list(self._buffer), http2=self._http2)
+
+    def sleep(self, seconds: float) -> None:
+        pass
+
+
+class AsyncMockStream(AsyncNetworkStream):
+    def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None:
+        self._buffer = buffer
+        self._http2 = http2
+        self._closed = False
+
+    async def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes:
+        if self._closed:
+            raise ReadError("Connection closed")
+        if not self._buffer:
+            return b""
+        return self._buffer.pop(0)
+
+    async def write(self, buffer: bytes, timeout: Optional[float] = None) -> None:
+        pass
+
+    async def aclose(self) -> None:
+        self._closed = True
+
+    async def start_tls(
+        self,
+        ssl_context: ssl.SSLContext,
+        server_hostname: Optional[str] = None,
+        timeout: Optional[float] = None,
+    ) -> AsyncNetworkStream:
+        return self
+
+    def get_extra_info(self, info: str) -> typing.Any:
+        return MockSSLObject(http2=self._http2) if info == "ssl_object" else None
+
+    def __repr__(self) -> str:
+        return "<httpcore.AsyncMockStream>"
+
+
+class AsyncMockBackend(AsyncNetworkBackend):
+    def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None:
+        self._buffer = buffer
+        self._http2 = http2
+
+    async def connect_tcp(
+        self,
+        host: str,
+        port: int,
+        timeout: Optional[float] = None,
+        local_address: Optional[str] = None,
+        socket_options: 
typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + return AsyncMockStream(list(self._buffer), http2=self._http2) + + async def connect_unix_socket( + self, + path: str, + timeout: Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + return AsyncMockStream(list(self._buffer), http2=self._http2) + + async def sleep(self, seconds: float) -> None: + pass diff --git a/packages/httpcore/_backends/sync.py b/packages/httpcore/_backends/sync.py index 968aead9e..da36bb1ab 100644 --- a/packages/httpcore/_backends/sync.py +++ b/packages/httpcore/_backends/sync.py @@ -1,182 +1,248 @@ import socket -import threading -import time -from ssl import SSLContext -from types import TracebackType -from typing import Optional, Type +import ssl +import sys +import typing +from functools import partial from .._exceptions import ( ConnectError, ConnectTimeout, + ExceptionMapping, ReadError, ReadTimeout, WriteError, WriteTimeout, map_exceptions, ) -from .tcp_keep_alive import enable_tcp_keep_alive -from .._types import TimeoutDict from .._utils import is_socket_readable +from .base import SOCKET_OPTION, NetworkBackend, NetworkStream +from .tcp_keep_alive import enable_tcp_keep_alive -class SyncSocketStream: +class TLSinTLSStream(NetworkStream): # pragma: no cover """ - A socket stream with read/write operations. Abstracts away any asyncio-specific - interfaces into a more generic base class, that we can use with alternate - backends, or for stand-alone test cases. + Because the standard `SSLContext.wrap_socket` method does + not work for `SSLSocket` objects, we need this class + to implement TLS stream using an underlying `SSLObject` + instance in order to support TLS on top of TLS. 
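+
+    A rough sketch of the idea (illustrative names, not part of this module):
+
+    ```python
+    import ssl
+
+    ctx = ssl.create_default_context()
+    incoming, outgoing = ssl.MemoryBIO(), ssl.MemoryBIO()
+    ssl_obj = ctx.wrap_bio(incoming, outgoing, server_hostname="example.com")
+    # Ciphertext produced by `ssl_obj` accumulates in `outgoing` and must be
+    # sent over the outer, already-encrypted socket; bytes received from that
+    # socket are fed into `incoming` for `ssl_obj` to decrypt.
+    ```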
""" - def __init__(self, sock: socket.socket) -> None: - self.sock = sock - self.read_lock = threading.Lock() - self.write_lock = threading.Lock() + # Defined in RFC 8449 + TLS_RECORD_SIZE = 16384 - def get_http_version(self) -> str: - selected_alpn_protocol = getattr(self.sock, "selected_alpn_protocol", None) - if selected_alpn_protocol is not None: - ident = selected_alpn_protocol() - return "HTTP/2" if ident == "h2" else "HTTP/1.1" - return "HTTP/1.1" - - def start_tls( - self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict - ) -> "SyncSocketStream": - connect_timeout = timeout.get("connect") - exc_map = {socket.timeout: ConnectTimeout, socket.error: ConnectError} + def __init__( + self, + sock: socket.socket, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ): + self._sock = sock + self._incoming = ssl.MemoryBIO() + self._outgoing = ssl.MemoryBIO() + + self.ssl_obj = ssl_context.wrap_bio( + incoming=self._incoming, + outgoing=self._outgoing, + server_hostname=server_hostname, + ) + + self._sock.settimeout(timeout) + self._perform_io(self.ssl_obj.do_handshake) + + def _perform_io( + self, + func: typing.Callable[..., typing.Any], + ) -> typing.Any: + ret = None - with map_exceptions(exc_map): - self.sock.settimeout(connect_timeout) - wrapped = ssl_context.wrap_socket( - self.sock, server_hostname=hostname.decode("ascii") - ) + while True: + errno = None + try: + ret = func() + except (ssl.SSLWantReadError, ssl.SSLWantWriteError) as e: + errno = e.errno - return SyncSocketStream(wrapped) + self._sock.sendall(self._outgoing.read()) - def read(self, n: int, timeout: TimeoutDict) -> bytes: - read_timeout = timeout.get("read") - exc_map = {socket.timeout: ReadTimeout, socket.error: ReadError} + if errno == ssl.SSL_ERROR_WANT_READ: + buf = self._sock.recv(self.TLS_RECORD_SIZE) - with self.read_lock: - with map_exceptions(exc_map): - self.sock.settimeout(read_timeout) - return self.sock.recv(n) + if buf: + self._incoming.write(buf) + else: + self._incoming.write_eof() + if errno is None: + return ret - def write(self, data: bytes, timeout: TimeoutDict) -> None: - write_timeout = timeout.get("write") - exc_map = {socket.timeout: WriteTimeout, socket.error: WriteError} + def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: + exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + return typing.cast( + bytes, self._perform_io(partial(self.ssl_obj.read, max_bytes)) + ) - with self.write_lock: - with map_exceptions(exc_map): - while data: - self.sock.settimeout(write_timeout) - n = self.sock.send(data) - data = data[n:] + def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: + exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + while buffer: + nsent = self._perform_io(partial(self.ssl_obj.write, buffer)) + buffer = buffer[nsent:] def close(self) -> None: - with self.write_lock: - try: - self.sock.close() - except socket.error: - pass - - def is_readable(self) -> bool: - return is_socket_readable(self.sock) + self._sock.close() - -class SyncLock: - def __init__(self) -> None: - self._lock = threading.Lock() - - def __enter__(self) -> None: - self.acquire() - - def __exit__( + def start_tls( self, - exc_type: Type[BaseException] = None, - exc_value: BaseException = None, - 
traceback: TracebackType = None, - ) -> None: - self.release() - - def release(self) -> None: - self._lock.release() - - def acquire(self) -> None: - self._lock.acquire() + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> "NetworkStream": + raise NotImplementedError() + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object": + return self.ssl_obj + if info == "client_addr": + return self._sock.getsockname() + if info == "server_addr": + return self._sock.getpeername() + if info == "socket": + return self._sock + if info == "is_readable": + return is_socket_readable(self._sock) + return None + + +class SyncStream(NetworkStream): + def __init__(self, sock: socket.socket) -> None: + self._sock = sock + def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: + exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + return self._sock.recv(max_bytes) -class SyncSemaphore: - def __init__(self, max_value: int, exc_class: type) -> None: - self.max_value = max_value - self.exc_class = exc_class - self._semaphore = threading.Semaphore(max_value) + def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: + if not buffer: + return - def acquire(self, timeout: float = None) -> None: - if not self._semaphore.acquire(timeout=timeout): # type: ignore - raise self.exc_class() + exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} + with map_exceptions(exc_map): + while buffer: + self._sock.settimeout(timeout) + n = self._sock.send(buffer) + buffer = buffer[n:] - def release(self) -> None: - self._semaphore.release() + def close(self) -> None: + self._sock.close() + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> NetworkStream: + if isinstance(self._sock, ssl.SSLSocket): # pragma: no cover + raise RuntimeError( + "Attempted to add a TLS layer on top of the existing " + "TLS stream, which is not supported by httpcore package" + ) -class SyncBackend: - def open_tcp_stream( + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + with map_exceptions(exc_map): + try: + if isinstance(self._sock, ssl.SSLSocket): # pragma: no cover + # If the underlying socket has already been upgraded + # to the TLS layer (i.e. is an instance of SSLSocket), + # we need some additional smarts to support TLS-in-TLS. 
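+                    # A typical route into this branch (hypothetical flow, not
+                    # shown in this diff): `start_tls()` is first called for an
+                    # HTTPS proxy hop, a CONNECT request opens a tunnel, and a
+                    # second `start_tls()` call for the origin server then
+                    # arrives here with `self._sock` already an `SSLSocket`.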
+ return TLSinTLSStream( + self._sock, ssl_context, server_hostname, timeout + ) + else: + self._sock.settimeout(timeout) + sock = ssl_context.wrap_socket( + self._sock, server_hostname=server_hostname + ) + except Exception as exc: # pragma: nocover + self.close() + raise exc + return SyncStream(sock) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object" and isinstance(self._sock, ssl.SSLSocket): + return self._sock._sslobj # type: ignore + if info == "client_addr": + return self._sock.getsockname() + if info == "server_addr": + return self._sock.getpeername() + if info == "socket": + return self._sock + if info == "is_readable": + return is_socket_readable(self._sock) + return None + + +class SyncBackend(NetworkBackend): + def connect_tcp( self, - hostname: bytes, + host: str, port: int, - ssl_context: Optional[SSLContext], - timeout: TimeoutDict, - *, - local_address: Optional[str], - ) -> SyncSocketStream: - address = (hostname.decode("ascii"), port) - connect_timeout = timeout.get("connect") + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + # Note that we automatically include `TCP_NODELAY` + # in addition to any other custom socket options. + if socket_options is None: + socket_options = [] # pragma: no cover + address = (host, port) source_address = None if local_address is None else (local_address, 0) - exc_map = {socket.timeout: ConnectTimeout, socket.error: ConnectError} + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } with map_exceptions(exc_map): sock = socket.create_connection( - address, connect_timeout, source_address=source_address # type: ignore + address, + timeout, + source_address=source_address, ) + for option in socket_options: + sock.setsockopt(*option) # pragma: no cover + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) # Enable TCP Keep-Alive enable_tcp_keep_alive(sock) + return SyncStream(sock) - if ssl_context is not None: - sock = ssl_context.wrap_socket( - sock, server_hostname=hostname.decode("ascii") - ) - return SyncSocketStream(sock=sock) - - def open_uds_stream( + def connect_unix_socket( self, path: str, - hostname: bytes, - ssl_context: Optional[SSLContext], - timeout: TimeoutDict, - ) -> SyncSocketStream: - connect_timeout = timeout.get("connect") - exc_map = {socket.timeout: ConnectTimeout, socket.error: ConnectError} + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: # pragma: nocover + if sys.platform == "win32": + raise RuntimeError( + "Attempted to connect to a UNIX socket on a Windows system." 
+ ) + if socket_options is None: + socket_options = [] + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } with map_exceptions(exc_map): sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - sock.settimeout(connect_timeout) + for option in socket_options: + sock.setsockopt(*option) + sock.settimeout(timeout) sock.connect(path) - - if ssl_context is not None: - sock = ssl_context.wrap_socket( - sock, server_hostname=hostname.decode("ascii") - ) - - return SyncSocketStream(sock=sock) - - def create_lock(self) -> SyncLock: - return SyncLock() - - def create_semaphore(self, max_value: int, exc_class: type) -> SyncSemaphore: - return SyncSemaphore(max_value, exc_class=exc_class) - - def time(self) -> float: - return time.monotonic() - - def sleep(self, seconds: float) -> None: - time.sleep(seconds) + return SyncStream(sock) diff --git a/packages/httpcore/_backends/trio.py b/packages/httpcore/_backends/trio.py index d6e67c2e3..b1626d28e 100644 --- a/packages/httpcore/_backends/trio.py +++ b/packages/httpcore/_backends/trio.py @@ -1,212 +1,161 @@ -from ssl import SSLContext -from typing import Optional +import ssl +import typing import trio from .._exceptions import ( ConnectError, ConnectTimeout, + ExceptionMapping, ReadError, ReadTimeout, WriteError, WriteTimeout, map_exceptions, ) -from .._types import TimeoutDict -from .base import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream -def none_as_inf(value: Optional[float]) -> float: - return value if value is not None else float("inf") - - -class SocketStream(AsyncSocketStream): +class TrioStream(AsyncNetworkStream): def __init__(self, stream: trio.abc.Stream) -> None: - self.stream = stream - self.read_lock = trio.Lock() - self.write_lock = trio.Lock() + self._stream = stream + + async def read( + self, max_bytes: int, timeout: typing.Optional[float] = None + ) -> bytes: + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ReadTimeout, + trio.BrokenResourceError: ReadError, + trio.ClosedResourceError: ReadError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + data: bytes = await self._stream.receive_some(max_bytes=max_bytes) + return data + + async def write( + self, buffer: bytes, timeout: typing.Optional[float] = None + ) -> None: + if not buffer: + return - def get_http_version(self) -> str: - if not isinstance(self.stream, trio.SSLStream): - return "HTTP/1.1" + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: WriteTimeout, + trio.BrokenResourceError: WriteError, + trio.ClosedResourceError: WriteError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + await self._stream.send_all(data=buffer) - ident = self.stream.selected_alpn_protocol() - return "HTTP/2" if ident == "h2" else "HTTP/1.1" + async def aclose(self) -> None: + await self._stream.aclose() async def start_tls( - self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict - ) -> "SocketStream": - connect_timeout = none_as_inf(timeout.get("connect")) - exc_map = { + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> AsyncNetworkStream: + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { trio.TooSlowError: ConnectTimeout, 
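+            # These mappings are consumed by the `map_exceptions` context
+            # manager, which re-raises a matching trio exception as the
+            # corresponding backend-independent httpcore error.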
trio.BrokenResourceError: ConnectError, } ssl_stream = trio.SSLStream( - self.stream, + self._stream, ssl_context=ssl_context, - server_hostname=hostname.decode("ascii"), + server_hostname=server_hostname, + https_compatible=True, + server_side=False, ) - with map_exceptions(exc_map): - with trio.fail_after(connect_timeout): - await ssl_stream.do_handshake() - return SocketStream(ssl_stream) - - async def read(self, n: int, timeout: TimeoutDict) -> bytes: - read_timeout = none_as_inf(timeout.get("read")) - exc_map = {trio.TooSlowError: ReadTimeout, trio.BrokenResourceError: ReadError} - - async with self.read_lock: - with map_exceptions(exc_map): - try: - with trio.fail_after(read_timeout): - return await self.stream.receive_some(max_bytes=n) - except trio.TooSlowError as exc: - await self.stream.aclose() - raise exc - - async def write(self, data: bytes, timeout: TimeoutDict) -> None: - if not data: - return - - write_timeout = none_as_inf(timeout.get("write")) - exc_map = { - trio.TooSlowError: WriteTimeout, - trio.BrokenResourceError: WriteError, - } - - async with self.write_lock: - with map_exceptions(exc_map): - try: - with trio.fail_after(write_timeout): - return await self.stream.send_all(data) - except trio.TooSlowError as exc: - await self.stream.aclose() - raise exc - - async def aclose(self) -> None: - async with self.write_lock: try: - await self.stream.aclose() - except trio.BrokenResourceError: - pass - - def is_readable(self) -> bool: - # Adapted from: https://github.com/encode/httpx/pull/143#issuecomment-515202982 - stream = self.stream - - # Peek through any SSLStream wrappers to get the underlying SocketStream. + with trio.fail_after(timeout_or_inf): + await ssl_stream.do_handshake() + except Exception as exc: # pragma: nocover + await self.aclose() + raise exc + return TrioStream(ssl_stream) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object" and isinstance(self._stream, trio.SSLStream): + # Type checkers cannot see `_ssl_object` attribute because trio._ssl.SSLStream uses __getattr__/__setattr__. 
+ # Tracked at https://github.com/python-trio/trio/issues/542 + return self._stream._ssl_object # type: ignore[attr-defined] + if info == "client_addr": + return self._get_socket_stream().socket.getsockname() + if info == "server_addr": + return self._get_socket_stream().socket.getpeername() + if info == "socket": + stream = self._stream + while isinstance(stream, trio.SSLStream): + stream = stream.transport_stream + assert isinstance(stream, trio.SocketStream) + return stream.socket + if info == "is_readable": + socket = self.get_extra_info("socket") + return socket.is_readable() + return None + + def _get_socket_stream(self) -> trio.SocketStream: + stream = self._stream while isinstance(stream, trio.SSLStream): stream = stream.transport_stream assert isinstance(stream, trio.SocketStream) - - return stream.socket.is_readable() + return stream -class Lock(AsyncLock): - def __init__(self) -> None: - self._lock = trio.Lock() - - async def release(self) -> None: - self._lock.release() - - async def acquire(self) -> None: - await self._lock.acquire() - - -class Semaphore(AsyncSemaphore): - def __init__(self, max_value: int, exc_class: type): - self.max_value = max_value - self.exc_class = exc_class - - @property - def semaphore(self) -> trio.Semaphore: - if not hasattr(self, "_semaphore"): - self._semaphore = trio.Semaphore(self.max_value, max_value=self.max_value) - return self._semaphore - - async def acquire(self, timeout: float = None) -> None: - timeout = none_as_inf(timeout) - - with trio.move_on_after(timeout): - await self.semaphore.acquire() - return - - raise self.exc_class() - - async def release(self) -> None: - self.semaphore.release() - - -class TrioBackend(AsyncBackend): - async def open_tcp_stream( +class TrioBackend(AsyncNetworkBackend): + async def connect_tcp( self, - hostname: bytes, + host: str, port: int, - ssl_context: Optional[SSLContext], - timeout: TimeoutDict, - *, - local_address: Optional[str], - ) -> AsyncSocketStream: - connect_timeout = none_as_inf(timeout.get("connect")) - # Trio will support local_address from 0.16.1 onwards. - # We only include the keyword argument if a local_address - #  argument has been passed. - kwargs: dict = {} if local_address is None else {"local_address": local_address} - exc_map = { - OSError: ConnectError, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + # By default for TCP sockets, trio enables TCP_NODELAY. 
+ # https://trio.readthedocs.io/en/stable/reference-io.html#trio.SocketStream + if socket_options is None: + socket_options = [] # pragma: no cover + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { trio.TooSlowError: ConnectTimeout, trio.BrokenResourceError: ConnectError, + OSError: ConnectError, } - with map_exceptions(exc_map): - with trio.fail_after(connect_timeout): + with trio.fail_after(timeout_or_inf): stream: trio.abc.Stream = await trio.open_tcp_stream( - hostname, port, **kwargs + host=host, port=port, local_address=local_address ) + for option in socket_options: + stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return TrioStream(stream) - if ssl_context is not None: - stream = trio.SSLStream( - stream, ssl_context, server_hostname=hostname.decode("ascii") - ) - await stream.do_handshake() - - return SocketStream(stream=stream) - - async def open_uds_stream( + async def connect_unix_socket( self, path: str, - hostname: bytes, - ssl_context: Optional[SSLContext], - timeout: TimeoutDict, - ) -> AsyncSocketStream: - connect_timeout = none_as_inf(timeout.get("connect")) - exc_map = { - OSError: ConnectError, + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { trio.TooSlowError: ConnectTimeout, trio.BrokenResourceError: ConnectError, + OSError: ConnectError, } - with map_exceptions(exc_map): - with trio.fail_after(connect_timeout): + with trio.fail_after(timeout_or_inf): stream: trio.abc.Stream = await trio.open_unix_socket(path) - - if ssl_context is not None: - stream = trio.SSLStream( - stream, ssl_context, server_hostname=hostname.decode("ascii") - ) - await stream.do_handshake() - - return SocketStream(stream=stream) - - def create_lock(self) -> AsyncLock: - return Lock() - - def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore: - return Semaphore(max_value, exc_class=exc_class) - - async def time(self) -> float: - return trio.current_time() + for option in socket_options: + stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return TrioStream(stream) async def sleep(self, seconds: float) -> None: - await trio.sleep(seconds) + await trio.sleep(seconds) # pragma: nocover diff --git a/packages/httpcore/_bytestreams.py b/packages/httpcore/_bytestreams.py deleted file mode 100644 index 317f41103..000000000 --- a/packages/httpcore/_bytestreams.py +++ /dev/null @@ -1,96 +0,0 @@ -from typing import AsyncIterator, Callable, Iterator - -from ._async.base import AsyncByteStream -from ._sync.base import SyncByteStream - - -class ByteStream(AsyncByteStream, SyncByteStream): - """ - A concrete implementation for either sync or async byte streams. - - Example:: - - stream = httpcore.ByteStream(b"123") - - Parameters - ---------- - content: - A plain byte string used as the content of the stream. - """ - - def __init__(self, content: bytes) -> None: - self._content = content - - def __iter__(self) -> Iterator[bytes]: - yield self._content - - async def __aiter__(self) -> AsyncIterator[bytes]: - yield self._content - - -class IteratorByteStream(SyncByteStream): - """ - A concrete implementation for sync byte streams. - - Example:: - - def generate_content(): - yield b"Hello, world!" - ... 
- - stream = httpcore.IteratorByteStream(generate_content()) - - Parameters - ---------- - iterator: - A sync byte iterator, used as the content of the stream. - close_func: - An optional function called when closing the stream. - """ - - def __init__(self, iterator: Iterator[bytes], close_func: Callable = None) -> None: - self._iterator = iterator - self._close_func = close_func - - def __iter__(self) -> Iterator[bytes]: - for chunk in self._iterator: - yield chunk - - def close(self) -> None: - if self._close_func is not None: - self._close_func() - - -class AsyncIteratorByteStream(AsyncByteStream): - """ - A concrete implementation for async byte streams. - - Example:: - - async def generate_content(): - yield b"Hello, world!" - ... - - stream = httpcore.AsyncIteratorByteStream(generate_content()) - - Parameters - ---------- - aiterator: - An async byte iterator, used as the content of the stream. - aclose_func: - An optional async function called when closing the stream. - """ - - def __init__( - self, aiterator: AsyncIterator[bytes], aclose_func: Callable = None - ) -> None: - self._aiterator = aiterator - self._aclose_func = aclose_func - - async def __aiter__(self) -> AsyncIterator[bytes]: - async for chunk in self._aiterator: - yield chunk - - async def aclose(self) -> None: - if self._aclose_func is not None: - await self._aclose_func() diff --git a/packages/httpcore/_exceptions.py b/packages/httpcore/_exceptions.py index ba5682999..81e7fc61d 100644 --- a/packages/httpcore/_exceptions.py +++ b/packages/httpcore/_exceptions.py @@ -1,16 +1,26 @@ import contextlib -from typing import Dict, Iterator, Type +from typing import Iterator, Mapping, Type + +ExceptionMapping = Mapping[Type[Exception], Type[Exception]] @contextlib.contextmanager -def map_exceptions(map: Dict[Type[Exception], Type[Exception]]) -> Iterator[None]: +def map_exceptions(map: ExceptionMapping) -> Iterator[None]: try: yield except Exception as exc: # noqa: PIE786 for from_exc, to_exc in map.items(): if isinstance(exc, from_exc): - raise to_exc(exc) from None - raise + raise to_exc(exc) from exc + raise # pragma: nocover + + +class ConnectionNotAvailable(Exception): + pass + + +class ProxyError(Exception): + pass class UnsupportedProtocol(Exception): @@ -29,10 +39,6 @@ class LocalProtocolError(ProtocolError): pass -class ProxyError(Exception): - pass - - # Timeout errors @@ -73,7 +79,3 @@ class ReadError(NetworkError): class WriteError(NetworkError): pass - - -class CloseError(NetworkError): - pass diff --git a/packages/httpcore/_models.py b/packages/httpcore/_models.py new file mode 100644 index 000000000..11bfcd84f --- /dev/null +++ b/packages/httpcore/_models.py @@ -0,0 +1,484 @@ +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + Optional, + Sequence, + Tuple, + Union, +) +from urllib.parse import urlparse + +# Functions for typechecking... + + +HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] +HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] +HeaderTypes = Union[HeadersAsSequence, HeadersAsMapping, None] + +Extensions = MutableMapping[str, Any] + + +def enforce_bytes(value: Union[bytes, str], *, name: str) -> bytes: + """ + Any arguments that are ultimately represented as bytes can be specified + either as bytes or as strings. + + However we enforce that any string arguments must only contain characters in + the plain ASCII range. chr(0)...chr(127). 
If you need to use characters
+    outside that range then be precise, and use a byte-wise argument.
+    """
+    if isinstance(value, str):
+        try:
+            return value.encode("ascii")
+        except UnicodeEncodeError:
+            raise TypeError(f"{name} strings may not include unicode characters.")
+    elif isinstance(value, bytes):
+        return value
+
+    seen_type = type(value).__name__
+    raise TypeError(f"{name} must be bytes or str, but got {seen_type}.")
+
+
+def enforce_url(value: Union["URL", bytes, str], *, name: str) -> "URL":
+    """
+    Type check for URL parameters.
+    """
+    if isinstance(value, (bytes, str)):
+        return URL(value)
+    elif isinstance(value, URL):
+        return value
+
+    seen_type = type(value).__name__
+    raise TypeError(f"{name} must be a URL, bytes, or str, but got {seen_type}.")
+
+
+def enforce_headers(
+    value: Union[HeadersAsMapping, HeadersAsSequence, None] = None, *, name: str
+) -> List[Tuple[bytes, bytes]]:
+    """
+    Convenience function that ensures all items in request or response headers
+    are either bytes or strings in the plain ASCII range.
+    """
+    if value is None:
+        return []
+    elif isinstance(value, Mapping):
+        return [
+            (
+                enforce_bytes(k, name="header name"),
+                enforce_bytes(v, name="header value"),
+            )
+            for k, v in value.items()
+        ]
+    elif isinstance(value, Sequence):
+        return [
+            (
+                enforce_bytes(k, name="header name"),
+                enforce_bytes(v, name="header value"),
+            )
+            for k, v in value
+        ]
+
+    seen_type = type(value).__name__
+    raise TypeError(
+        f"{name} must be a mapping or sequence of two-tuples, but got {seen_type}."
+    )
+
+
+def enforce_stream(
+    value: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None], *, name: str
+) -> Union[Iterable[bytes], AsyncIterable[bytes]]:
+    if value is None:
+        return ByteStream(b"")
+    elif isinstance(value, bytes):
+        return ByteStream(value)
+    return value
+
+
+# * https://tools.ietf.org/html/rfc3986#section-3.2.3
+# * https://url.spec.whatwg.org/#url-miscellaneous
+# * https://url.spec.whatwg.org/#scheme-state
+DEFAULT_PORTS = {
+    b"ftp": 21,
+    b"http": 80,
+    b"https": 443,
+    b"ws": 80,
+    b"wss": 443,
+}
+
+
+def include_request_headers(
+    headers: List[Tuple[bytes, bytes]],
+    *,
+    url: "URL",
+    content: Union[None, bytes, Iterable[bytes], AsyncIterable[bytes]],
+) -> List[Tuple[bytes, bytes]]:
+    headers_set = set(k.lower() for k, v in headers)
+
+    if b"host" not in headers_set:
+        default_port = DEFAULT_PORTS.get(url.scheme)
+        if url.port is None or url.port == default_port:
+            header_value = url.host
+        else:
+            header_value = b"%b:%d" % (url.host, url.port)
+        headers = [(b"Host", header_value)] + headers
+
+    if (
+        content is not None
+        and b"content-length" not in headers_set
+        and b"transfer-encoding" not in headers_set
+    ):
+        if isinstance(content, bytes):
+            content_length = str(len(content)).encode("ascii")
+            headers += [(b"Content-Length", content_length)]
+        else:
+            headers += [(b"Transfer-Encoding", b"chunked")]  # pragma: nocover
+
+    return headers
+
+
+# Interfaces for byte streams...
+
+
+class ByteStream:
+    """
+    A container for non-streaming content, which supports both sync and async
+    stream iteration.
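+
+    A minimal usage sketch (illustrative):
+
+    ```python
+    stream = ByteStream(b"Hello, world!")
+    assert b"".join(stream) == b"Hello, world!"  # sync iteration
+    # ...or `async for chunk in stream:` from async code.
+    ```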
+ """ + + def __init__(self, content: bytes) -> None: + self._content = content + + def __iter__(self) -> Iterator[bytes]: + yield self._content + + async def __aiter__(self) -> AsyncIterator[bytes]: + yield self._content + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{len(self._content)} bytes]>" + + +class Origin: + def __init__(self, scheme: bytes, host: bytes, port: int) -> None: + self.scheme = scheme + self.host = host + self.port = port + + def __eq__(self, other: Any) -> bool: + return ( + isinstance(other, Origin) + and self.scheme == other.scheme + and self.host == other.host + and self.port == other.port + ) + + def __str__(self) -> str: + scheme = self.scheme.decode("ascii") + host = self.host.decode("ascii") + port = str(self.port) + return f"{scheme}://{host}:{port}" + + +class URL: + """ + Represents the URL against which an HTTP request may be made. + + The URL may either be specified as a plain string, for convienence: + + ```python + url = httpcore.URL("https://www.example.com/") + ``` + + Or be constructed with explicitily pre-parsed components: + + ```python + url = httpcore.URL(scheme=b'https', host=b'www.example.com', port=None, target=b'/') + ``` + + Using this second more explicit style allows integrations that are using + `httpcore` to pass through URLs that have already been parsed in order to use + libraries such as `rfc-3986` rather than relying on the stdlib. It also ensures + that URL parsing is treated identically at both the networking level and at any + higher layers of abstraction. + + The four components are important here, as they allow the URL to be precisely + specified in a pre-parsed format. They also allow certain types of request to + be created that could not otherwise be expressed. + + For example, an HTTP request to `http://www.example.com/` forwarded via a proxy + at `http://localhost:8080`... + + ```python + # Constructs an HTTP request with a complete URL as the target: + # GET https://www.example.com/ HTTP/1.1 + url = httpcore.URL( + scheme=b'http', + host=b'localhost', + port=8080, + target=b'https://www.example.com/' + ) + request = httpcore.Request( + method="GET", + url=url + ) + ``` + + Another example is constructing an `OPTIONS *` request... + + ```python + # Constructs an 'OPTIONS *' HTTP request: + # OPTIONS * HTTP/1.1 + url = httpcore.URL(scheme=b'https', host=b'www.example.com', target=b'*') + request = httpcore.Request(method="OPTIONS", url=url) + ``` + + This kind of request is not possible to formulate with a URL string, + because the `/` delimiter is always used to demark the target from the + host/port portion of the URL. + + For convenience, string-like arguments may be specified either as strings or + as bytes. However, once a request is being issue over-the-wire, the URL + components are always ultimately required to be a bytewise representation. + + In order to avoid any ambiguity over character encodings, when strings are used + as arguments, they must be strictly limited to the ASCII range `chr(0)`-`chr(127)`. + If you require a bytewise representation that is outside this range you must + handle the character encoding directly, and pass a bytes instance. + """ + + def __init__( + self, + url: Union[bytes, str] = "", + *, + scheme: Union[bytes, str] = b"", + host: Union[bytes, str] = b"", + port: Optional[int] = None, + target: Union[bytes, str] = b"", + ) -> None: + """ + Parameters: + url: The complete URL as a string or bytes. + scheme: The URL scheme as a string or bytes. 
+ Typically either `"http"` or `"https"`. + host: The URL host as a string or bytes. Such as `"www.example.com"`. + port: The port to connect to. Either an integer or `None`. + target: The target of the HTTP request. Such as `"/items?search=red"`. + """ + if url: + parsed = urlparse(enforce_bytes(url, name="url")) + self.scheme = parsed.scheme + self.host = parsed.hostname or b"" + self.port = parsed.port + self.target = (parsed.path or b"/") + ( + b"?" + parsed.query if parsed.query else b"" + ) + else: + self.scheme = enforce_bytes(scheme, name="scheme") + self.host = enforce_bytes(host, name="host") + self.port = port + self.target = enforce_bytes(target, name="target") + + @property + def origin(self) -> Origin: + default_port = { + b"http": 80, + b"https": 443, + b"ws": 80, + b"wss": 443, + b"socks5": 1080, + }[self.scheme] + return Origin( + scheme=self.scheme, host=self.host, port=self.port or default_port + ) + + def __eq__(self, other: Any) -> bool: + return ( + isinstance(other, URL) + and other.scheme == self.scheme + and other.host == self.host + and other.port == self.port + and other.target == self.target + ) + + def __bytes__(self) -> bytes: + if self.port is None: + return b"%b://%b%b" % (self.scheme, self.host, self.target) + return b"%b://%b:%d%b" % (self.scheme, self.host, self.port, self.target) + + def __repr__(self) -> str: + return ( + f"{self.__class__.__name__}(scheme={self.scheme!r}, " + f"host={self.host!r}, port={self.port!r}, target={self.target!r})" + ) + + +class Request: + """ + An HTTP request. + """ + + def __init__( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> None: + """ + Parameters: + method: The HTTP request method, either as a string or bytes. + For example: `GET`. + url: The request URL, either as a `URL` instance, or as a string or bytes. + For example: `"https://www.example.com".` + headers: The HTTP request headers. + content: The content of the response body. + extensions: A dictionary of optional extra information included on + the request. Possible keys include `"timeout"`, and `"trace"`. + """ + self.method: bytes = enforce_bytes(method, name="method") + self.url: URL = enforce_url(url, name="url") + self.headers: List[Tuple[bytes, bytes]] = enforce_headers( + headers, name="headers" + ) + self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream( + content, name="content" + ) + self.extensions = {} if extensions is None else extensions + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.method!r}]>" + + +class Response: + """ + An HTTP response. + """ + + def __init__( + self, + status: int, + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> None: + """ + Parameters: + status: The HTTP status code of the response. For example `200`. + headers: The HTTP response headers. + content: The content of the response body. + extensions: A dictionary of optional extra information included on + the responseself.Possible keys include `"http_version"`, + `"reason_phrase"`, and `"network_stream"`. 
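
[Editor's note: an illustrative aside, not part of the patch. It shows the equivalence between the two `URL` construction styles described above, and the byte-normalization that `Request` performs.]

```python
# String-form and component-form construction parse to the same URL.
assert URL("http://example.com:8080/path?q=1") == URL(
    scheme=b"http", host=b"example.com", port=8080, target=b"/path?q=1"
)

# Request normalizes its arguments to bytes via the enforce_* helpers.
request = Request("GET", "https://www.example.com/", headers={"Accept": "*/*"})
assert request.method == b"GET"
assert request.headers == [(b"Accept", b"*/*")]
```
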
+ """ + self.status: int = status + self.headers: List[Tuple[bytes, bytes]] = enforce_headers( + headers, name="headers" + ) + self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream( + content, name="content" + ) + self.extensions = {} if extensions is None else extensions + + self._stream_consumed = False + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + if isinstance(self.stream, Iterable): + raise RuntimeError( + "Attempted to access 'response.content' on a streaming response. " + "Call 'response.read()' first." + ) + else: + raise RuntimeError( + "Attempted to access 'response.content' on a streaming response. " + "Call 'await response.aread()' first." + ) + return self._content + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.status}]>" + + # Sync interface... + + def read(self) -> bytes: + if not isinstance(self.stream, Iterable): # pragma: nocover + raise RuntimeError( + "Attempted to read an asynchronous response using 'response.read()'. " + "You should use 'await response.aread()' instead." + ) + if not hasattr(self, "_content"): + self._content = b"".join([part for part in self.iter_stream()]) + return self._content + + def iter_stream(self) -> Iterator[bytes]: + if not isinstance(self.stream, Iterable): # pragma: nocover + raise RuntimeError( + "Attempted to stream an asynchronous response using 'for ... in " + "response.iter_stream()'. " + "You should use 'async for ... in response.aiter_stream()' instead." + ) + if self._stream_consumed: + raise RuntimeError( + "Attempted to call 'for ... in response.iter_stream()' more than once." + ) + self._stream_consumed = True + for chunk in self.stream: + yield chunk + + def close(self) -> None: + if not isinstance(self.stream, Iterable): # pragma: nocover + raise RuntimeError( + "Attempted to close an asynchronous response using 'response.close()'. " + "You should use 'await response.aclose()' instead." + ) + if hasattr(self.stream, "close"): + self.stream.close() + + # Async interface... + + async def aread(self) -> bytes: + if not isinstance(self.stream, AsyncIterable): # pragma: nocover + raise RuntimeError( + "Attempted to read an synchronous response using " + "'await response.aread()'. " + "You should use 'response.read()' instead." + ) + if not hasattr(self, "_content"): + self._content = b"".join([part async for part in self.aiter_stream()]) + return self._content + + async def aiter_stream(self) -> AsyncIterator[bytes]: + if not isinstance(self.stream, AsyncIterable): # pragma: nocover + raise RuntimeError( + "Attempted to stream an synchronous response using 'async for ... in " + "response.aiter_stream()'. " + "You should use 'for ... in response.iter_stream()' instead." + ) + if self._stream_consumed: + raise RuntimeError( + "Attempted to call 'async for ... in response.aiter_stream()' " + "more than once." + ) + self._stream_consumed = True + async for chunk in self.stream: + yield chunk + + async def aclose(self) -> None: + if not isinstance(self.stream, AsyncIterable): # pragma: nocover + raise RuntimeError( + "Attempted to close a synchronous response using " + "'await response.aclose()'. " + "You should use 'response.close()' instead." 
diff --git a/packages/httpcore/_ssl.py b/packages/httpcore/_ssl.py
new file mode 100644
index 000000000..c99c5a679
--- /dev/null
+++ b/packages/httpcore/_ssl.py
@@ -0,0 +1,9 @@
+import ssl
+
+import certifi
+
+
+def default_ssl_context() -> ssl.SSLContext:
+    context = ssl.create_default_context()
+    context.load_verify_locations(certifi.where())
+    return context
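
[Editor's note: this new module gives httpcore one place to build its default TLS configuration: the stdlib's secure defaults plus certifi's CA bundle as the trust root. A usage sketch follows; it assumes `default_ssl_context` is re-exported from the package root, and passing it explicitly is equivalent to omitting `ssl_context`, since connections fall back to it when none is given.]

```python
import httpcore

# Explicitly supplying the default context, equivalent to the default behaviour.
pool = httpcore.ConnectionPool(ssl_context=httpcore.default_ssl_context())
```
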
- """ - - def handle_request( - self, - method: bytes, - url: URL, - headers: Headers, - stream: SyncByteStream, - extensions: dict, - ) -> Tuple[int, Headers, SyncByteStream, dict]: - """ - The interface for sending a single HTTP request, and returning a response. - - Parameters - ---------- - method: - The HTTP method, such as ``b'GET'``. - url: - The URL as a 4-tuple of (scheme, host, port, path). - headers: - Any HTTP headers to send with the request. - stream: - The body of the HTTP request. - extensions: - A dictionary of optional extensions. - - Returns - ------- - status_code: - The HTTP status code, such as ``200``. - headers: - Any HTTP headers included on the response. - stream: - The body of the HTTP response. - extensions: - A dictionary of optional extensions. - """ - raise NotImplementedError() # pragma: nocover - - def close(self) -> None: - """ - Close the implementation, which should close any outstanding response streams, - and any keep alive connections. - """ - - def __enter__(self: T) -> T: - return self - - def __exit__( - self, - exc_type: Type[BaseException] = None, - exc_value: BaseException = None, - traceback: TracebackType = None, - ) -> None: - self.close() diff --git a/packages/httpcore/_sync/connection.py b/packages/httpcore/_sync/connection.py index 382a4f9f6..81e4172a2 100644 --- a/packages/httpcore/_sync/connection.py +++ b/packages/httpcore/_sync/connection.py @@ -1,158 +1,111 @@ -from ssl import SSLContext -from typing import List, Optional, Tuple, cast +import itertools +import logging +import ssl +from types import TracebackType +from typing import Iterable, Iterator, Optional, Type + +from .._backends.sync import SyncBackend +from .._backends.base import SOCKET_OPTION, NetworkBackend, NetworkStream +from .._exceptions import ConnectError, ConnectionNotAvailable, ConnectTimeout +from .._models import Origin, Request, Response +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface -from .._backends.sync import SyncBackend, SyncLock, SyncSocketStream, SyncBackend -from .._exceptions import ConnectError, ConnectTimeout -from .._types import URL, Headers, Origin, TimeoutDict -from .._utils import exponential_backoff, get_logger, url_to_origin -from .base import SyncByteStream, SyncHTTPTransport, NewConnectionRequired -from .http import SyncBaseHTTPConnection -from .http11 import SyncHTTP11Connection +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. -logger = get_logger(__name__) -RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. +logger = logging.getLogger("httpcore.connection") + + +def exponential_backoff(factor: float) -> Iterator[float]: + """ + Generate a geometric sequence that has a ratio of 2 and starts with 0. 
 
 
-class SyncHTTPConnection(SyncHTTPTransport):
+class HTTPConnection(ConnectionInterface):
     def __init__(
         self,
         origin: Origin,
+        ssl_context: Optional[ssl.SSLContext] = None,
+        keepalive_expiry: Optional[float] = None,
         http1: bool = True,
         http2: bool = False,
-        keepalive_expiry: float = None,
-        uds: str = None,
-        ssl_context: SSLContext = None,
-        socket: SyncSocketStream = None,
-        local_address: str = None,
         retries: int = 0,
-        backend: SyncBackend = None,
-    ):
-        self.origin = origin
-        self._http1_enabled = http1
-        self._http2_enabled = http2
+        local_address: Optional[str] = None,
+        uds: Optional[str] = None,
+        network_backend: Optional[NetworkBackend] = None,
+        socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
+    ) -> None:
+        self._origin = origin
+        self._ssl_context = ssl_context
         self._keepalive_expiry = keepalive_expiry
-        self._uds = uds
-        self._ssl_context = SSLContext() if ssl_context is None else ssl_context
-        self.socket = socket
-        self._local_address = local_address
+        self._http1 = http1
+        self._http2 = http2
         self._retries = retries
+        self._local_address = local_address
+        self._uds = uds
 
-        alpn_protocols: List[str] = []
-        if http1:
-            alpn_protocols.append("http/1.1")
-        if http2:
-            alpn_protocols.append("h2")
-
-        self._ssl_context.set_alpn_protocols(alpn_protocols)
-
-        self.connection: Optional[SyncBaseHTTPConnection] = None
-        self._is_http11 = False
-        self._is_http2 = False
-        self._connect_failed = False
-        self._expires_at: Optional[float] = None
-        self._backend = SyncBackend() if backend is None else backend
-
-    def __repr__(self) -> str:
-        return f""
-
-    def info(self) -> str:
-        if self.connection is None:
-            return "Connection failed" if self._connect_failed else "Connecting"
-        return self.connection.info()
-
-    def should_close(self) -> bool:
-        """
-        Return `True` if the connection is in a state where it should be closed.
-        This occurs when any of the following occur:
-
-        * There are no active requests on an HTTP/1.1 connection, and the underlying
-          socket is readable. The only valid state the socket can be readable in
-          if this occurs is when the b"" EOF marker is about to be returned,
-          indicating a server disconnect.
-        * There are no active requests being made and the keepalive timeout has passed.
-        """
-        if self.connection is None:
-            return False
-        return self.connection.should_close()
-
-    def is_idle(self) -> bool:
-        """
-        Return `True` if the connection is currently idle.
-        """
-        if self.connection is None:
-            return False
-        return self.connection.is_idle()
+        self._network_backend: NetworkBackend = (
+            SyncBackend() if network_backend is None else network_backend
+        )
+        self._connection: Optional[ConnectionInterface] = None
+        self._connect_failed: bool = False
+        self._request_lock = Lock()
+        self._socket_options = socket_options
+
+    def handle_request(self, request: Request) -> Response:
+        if not self.can_handle_request(request.url.origin):
+            raise RuntimeError(
+                f"Attempted to send request to {request.url.origin} on connection to {self._origin}"
+            )
 
-    def is_closed(self) -> bool:
-        if self.connection is None:
-            return self._connect_failed
-        return self.connection.is_closed()
+        with self._request_lock:
+            if self._connection is None:
+                try:
+                    stream = self._connect(request)
 
-    def is_available(self) -> bool:
-        """
-        Return `True` if the connection is currently able to accept an outgoing request.
-        This occurs when any of the following occur:
-
-        * The connection has not yet been opened, and HTTP/2 support is enabled.
-          We don't *know* at this point if we'll end up on an HTTP/2 connection or
-          not, but we *might* do, so we indicate availability.
-        * The connection has been opened, and is currently idle.
-        * The connection is open, and is an HTTP/2 connection. The connection must
-          also not currently be exceeding the maximum number of allowable concurrent
-          streams and must not have exhausted the maximum total number of stream IDs.
-        """
-        if self.connection is None:
-            return self._http2_enabled and not self.is_closed
-        return self.connection.is_available()
-
-    @property
-    def request_lock(self) -> SyncLock:
-        # We do this lazily, to make sure backend autodetection always
-        # runs within an async context.
-        if not hasattr(self, "_request_lock"):
-            self._request_lock = self._backend.create_lock()
-        return self._request_lock
-
-    def handle_request(
-        self,
-        method: bytes,
-        url: URL,
-        headers: Headers,
-        stream: SyncByteStream,
-        extensions: dict,
-    ) -> Tuple[int, Headers, SyncByteStream, dict]:
-        assert url_to_origin(url) == self.origin
-        timeout = cast(TimeoutDict, extensions.get("timeout", {}))
-
-        with self.request_lock:
-            if self.connection is None:
-                if self._connect_failed:
-                    raise NewConnectionRequired()
-                if not self.socket:
-                    logger.trace(
-                        "open_socket origin=%r timeout=%r", self.origin, timeout
+                    ssl_object = stream.get_extra_info("ssl_object")
+                    http2_negotiated = (
+                        ssl_object is not None
+                        and ssl_object.selected_alpn_protocol() == "h2"
                     )
-                    self.socket = self._open_socket(timeout)
-                self._create_connection(self.socket)
-            elif not self.connection.is_available():
-                raise NewConnectionRequired()
-
-        assert self.connection is not None
-        logger.trace(
-            "connection.handle_request method=%r url=%r headers=%r",
-            method,
-            url,
-            headers,
-        )
-        return self.connection.handle_request(
-            method, url, headers, stream, extensions
-        )
+                    if http2_negotiated or (self._http2 and not self._http1):
+                        from .http2 import HTTP2Connection
+
+                        self._connection = HTTP2Connection(
+                            origin=self._origin,
+                            stream=stream,
+                            keepalive_expiry=self._keepalive_expiry,
+                        )
+                    else:
+                        self._connection = HTTP11Connection(
+                            origin=self._origin,
+                            stream=stream,
+                            keepalive_expiry=self._keepalive_expiry,
+                        )
+                except Exception as exc:
+                    self._connect_failed = True
+                    raise exc
+            elif not self._connection.is_available():
+                raise ConnectionNotAvailable()
+
+        return self._connection.handle_request(request)
 
-    def _open_socket(self, timeout: TimeoutDict = None) -> SyncSocketStream:
-        scheme, hostname, port = self.origin
-        timeout = {} if timeout is None else timeout
-        ssl_context = self._ssl_context if scheme == b"https" else None
+    def _connect(self, request: Request) -> NetworkStream:
+        timeouts = request.extensions.get("timeout", {})
+        sni_hostname = request.extensions.get("sni_hostname", None)
+        timeout = timeouts.get("connect", None)
 
         retries_left = self._retries
         delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR)
@@ -160,61 +113,110 @@ def _open_socket(self, timeout: TimeoutDict = None) -> SyncSocketStream:
         while True:
             try:
                 if self._uds is None:
-                    return self._backend.open_tcp_stream(
-                        hostname,
-                        port,
-                        ssl_context,
-                        timeout,
-                        local_address=self._local_address,
-                    )
+                    kwargs = {
+                        "host": self._origin.host.decode("ascii"),
+                        "port": self._origin.port,
+                        "local_address": self._local_address,
+                        "timeout": timeout,
+                        "socket_options": self._socket_options,
+                    }
+                    with Trace("connect_tcp", logger, request, kwargs) as trace:
+                        stream = self._network_backend.connect_tcp(**kwargs)
+                        trace.return_value = stream
                 else:
-                    return self._backend.open_uds_stream(
-                        self._uds, hostname, ssl_context, timeout
+                    kwargs = {
+                        "path": self._uds,
+                        "timeout": timeout,
+                        "socket_options": self._socket_options,
+                    }
+                    with Trace(
+                        "connect_unix_socket", logger, request, kwargs
+                    ) as trace:
+                        stream = self._network_backend.connect_unix_socket(
+                            **kwargs
+                        )
+                        trace.return_value = stream
+
+                if self._origin.scheme == b"https":
+                    ssl_context = (
+                        default_ssl_context()
+                        if self._ssl_context is None
+                        else self._ssl_context
                     )
+                    alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"]
+                    ssl_context.set_alpn_protocols(alpn_protocols)
+
+                    kwargs = {
+                        "ssl_context": ssl_context,
+                        "server_hostname": sni_hostname
+                        or self._origin.host.decode("ascii"),
+                        "timeout": timeout,
+                    }
+                    with Trace("start_tls", logger, request, kwargs) as trace:
+                        stream = stream.start_tls(**kwargs)
+                        trace.return_value = stream
+                return stream
             except (ConnectError, ConnectTimeout):
                 if retries_left <= 0:
-                    self._connect_failed = True
                     raise
                 retries_left -= 1
                 delay = next(delays)
-                self._backend.sleep(delay)
-            except Exception:  # noqa: PIE786
-                self._connect_failed = True
-                raise
-
-    def _create_connection(self, socket: SyncSocketStream) -> None:
-        http_version = socket.get_http_version()
-        logger.trace(
-            "create_connection socket=%r http_version=%r", socket, http_version
-        )
-        if http_version == "HTTP/2" or (
-            self._http2_enabled and not self._http1_enabled
-        ):
-            from .http2 import SyncHTTP2Connection
-
-            self._is_http2 = True
-            self.connection = SyncHTTP2Connection(
-                socket=socket,
-                keepalive_expiry=self._keepalive_expiry,
-                backend=self._backend,
-            )
-        else:
-            self._is_http11 = True
-            self.connection = SyncHTTP11Connection(
-                socket=socket, keepalive_expiry=self._keepalive_expiry
-            )
+                with Trace("retry", logger, request, kwargs) as trace:
+                    self._network_backend.sleep(delay)
 
-    def start_tls(
-        self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None
-    ) -> None:
-        if self.connection is not None:
-            logger.trace("start_tls hostname=%r timeout=%r", hostname, timeout)
-            self.socket = self.connection.start_tls(
-                hostname, ssl_context, timeout
-            )
-            logger.trace("start_tls complete hostname=%r timeout=%r", hostname, timeout)
+    def can_handle_request(self, origin: Origin) -> bool:
+        return origin == self._origin
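
[Editor's note: worth observing how strictly a connection is bound to its origin; `handle_request` rejects any request for a different host outright. An illustrative sketch, not part of the patch, assuming the classes defined in this patch are in scope.]

```python
origin = Origin(scheme=b"https", host=b"example.com", port=443)
conn = HTTPConnection(origin=origin)

# A request to any other origin is rejected rather than silently re-routed,
# and the check happens before any connection is attempted.
try:
    conn.handle_request(Request("GET", "https://other.example/"))
except RuntimeError as exc:
    print(exc)  # "Attempted to send request to https://other.example:443 ..."
```
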
Trace("close", logger, None, {}): + self._connection.close() + + def is_available(self) -> bool: + if self._connection is None: + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + def __enter__(self) -> "HTTPConnection": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self.close() diff --git a/packages/httpcore/_sync/connection_pool.py b/packages/httpcore/_sync/connection_pool.py index 22ca98fa4..dbcaff1fc 100644 --- a/packages/httpcore/_sync/connection_pool.py +++ b/packages/httpcore/_sync/connection_pool.py @@ -1,365 +1,356 @@ -import warnings -from ssl import SSLContext -from typing import ( - Iterator, - Callable, - Dict, - List, - Optional, - Set, - Tuple, - Union, - cast, -) - -from .._backends.sync import SyncBackend, SyncLock, SyncSemaphore -from .._backends.base import lookup_sync_backend -from .._exceptions import LocalProtocolError, PoolTimeout, UnsupportedProtocol -from .._threadlock import ThreadLock -from .._types import URL, Headers, Origin, TimeoutDict -from .._utils import get_logger, origin_to_url_string, url_to_origin -from .base import SyncByteStream, SyncHTTPTransport, NewConnectionRequired -from .connection import SyncHTTPConnection - -logger = get_logger(__name__) - - -class NullSemaphore(SyncSemaphore): - def __init__(self) -> None: - pass - - def acquire(self, timeout: float = None) -> None: - return - - def release(self) -> None: - return - - -class ResponseByteStream(SyncByteStream): - def __init__( - self, - stream: SyncByteStream, - connection: SyncHTTPConnection, - callback: Callable, - ) -> None: - """ - A wrapper around the response stream that we return from - `.handle_request()`. - - Ensures that when `stream.close()` is called, the connection pool - is notified via a callback. 
- """ - self.stream = stream +import ssl +import sys +from types import TracebackType +from typing import Iterable, Iterator, Iterable, List, Optional, Type + +from .._backends.sync import SyncBackend +from .._backends.base import SOCKET_OPTION, NetworkBackend +from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol +from .._models import Origin, Request, Response +from .._synchronization import Event, Lock, ShieldCancellation +from .connection import HTTPConnection +from .interfaces import ConnectionInterface, RequestInterface + + +class RequestStatus: + def __init__(self, request: Request): + self.request = request + self.connection: Optional[ConnectionInterface] = None + self._connection_acquired = Event() + + def set_connection(self, connection: ConnectionInterface) -> None: + assert self.connection is None self.connection = connection - self.callback = callback + self._connection_acquired.set() - def __iter__(self) -> Iterator[bytes]: - for chunk in self.stream: - yield chunk + def unset_connection(self) -> None: + assert self.connection is not None + self.connection = None + self._connection_acquired = Event() - def close(self) -> None: - try: - # Call the underlying stream close callback. - # This will be a call to `SyncHTTP11Connection._response_closed()` - # or `SyncHTTP2Stream._response_closed()`. - self.stream.close() - finally: - # Call the connection pool close callback. - # This will be a call to `SyncConnectionPool._response_closed()`. - self.callback(self.connection) + def wait_for_connection( + self, timeout: Optional[float] = None + ) -> ConnectionInterface: + if self.connection is None: + self._connection_acquired.wait(timeout=timeout) + assert self.connection is not None + return self.connection -class SyncConnectionPool(SyncHTTPTransport): +class ConnectionPool(RequestInterface): """ A connection pool for making HTTP requests. - - Parameters - ---------- - ssl_context: - An SSL context to use for verifying connections. - max_connections: - The maximum number of concurrent connections to allow. - max_keepalive_connections: - The maximum number of connections to allow before closing keep-alive - connections. - keepalive_expiry: - The maximum time to allow before closing a keep-alive connection. - http1: - Enable/Disable HTTP/1.1 support. Defaults to True. - http2: - Enable/Disable HTTP/2 support. Defaults to False. - uds: - Path to a Unix Domain Socket to use instead of TCP sockets. - local_address: - Local address to connect from. Can also be used to connect using a particular - address family. Using ``local_address="0.0.0.0"`` will connect using an - ``AF_INET`` address (IPv4), while using ``local_address="::"`` will connect - using an ``AF_INET6`` address (IPv6). - retries: - The maximum number of retries when trying to establish a connection. - backend: - A name indicating which concurrency backend to use. """ def __init__( self, - ssl_context: SSLContext = None, - max_connections: int = None, - max_keepalive_connections: int = None, - keepalive_expiry: float = None, + ssl_context: Optional[ssl.SSLContext] = None, + max_connections: Optional[int] = 10, + max_keepalive_connections: Optional[int] = None, + keepalive_expiry: Optional[float] = None, http1: bool = True, http2: bool = False, - uds: str = None, - local_address: str = None, retries: int = 0, - max_keepalive: int = None, - backend: Union[SyncBackend, str] = "sync", - ): - if max_keepalive is not None: - warnings.warn( - "'max_keepalive' is deprecated. 
 
 
-class SyncConnectionPool(SyncHTTPTransport):
+class ConnectionPool(RequestInterface):
     """
     A connection pool for making HTTP requests.
-
-    Parameters
-    ----------
-    ssl_context:
-        An SSL context to use for verifying connections.
-    max_connections:
-        The maximum number of concurrent connections to allow.
-    max_keepalive_connections:
-        The maximum number of connections to allow before closing keep-alive
-        connections.
-    keepalive_expiry:
-        The maximum time to allow before closing a keep-alive connection.
-    http1:
-        Enable/Disable HTTP/1.1 support. Defaults to True.
-    http2:
-        Enable/Disable HTTP/2 support. Defaults to False.
-    uds:
-        Path to a Unix Domain Socket to use instead of TCP sockets.
-    local_address:
-        Local address to connect from. Can also be used to connect using a particular
-        address family. Using ``local_address="0.0.0.0"`` will connect using an
-        ``AF_INET`` address (IPv4), while using ``local_address="::"`` will connect
-        using an ``AF_INET6`` address (IPv6).
-    retries:
-        The maximum number of retries when trying to establish a connection.
-    backend:
-        A name indicating which concurrency backend to use.
     """
 
     def __init__(
         self,
-        ssl_context: SSLContext = None,
-        max_connections: int = None,
-        max_keepalive_connections: int = None,
-        keepalive_expiry: float = None,
+        ssl_context: Optional[ssl.SSLContext] = None,
+        max_connections: Optional[int] = 10,
+        max_keepalive_connections: Optional[int] = None,
+        keepalive_expiry: Optional[float] = None,
         http1: bool = True,
         http2: bool = False,
-        uds: str = None,
-        local_address: str = None,
         retries: int = 0,
-        max_keepalive: int = None,
-        backend: Union[SyncBackend, str] = "sync",
-    ):
-        if max_keepalive is not None:
-            warnings.warn(
-                "'max_keepalive' is deprecated. Use 'max_keepalive_connections'.",
-                DeprecationWarning,
-            )
-            max_keepalive_connections = max_keepalive
+        local_address: Optional[str] = None,
+        uds: Optional[str] = None,
+        network_backend: Optional[NetworkBackend] = None,
+        socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
+    ) -> None:
+        """
+        A connection pool for making HTTP requests.
+
+        Parameters:
+            ssl_context: An SSL context to use for verifying connections.
+                If not specified, the default `httpcore.default_ssl_context()`
+                will be used.
+            max_connections: The maximum number of concurrent HTTP connections that
+                the pool should allow. Any attempt to send a request on a pool that
+                would exceed this amount will block until a connection is available.
+            max_keepalive_connections: The maximum number of idle HTTP connections
+                that will be maintained in the pool.
+            keepalive_expiry: The duration in seconds that an idle HTTP connection
+                may be maintained for before being expired from the pool.
+            http1: A boolean indicating if HTTP/1.1 requests should be supported
+                by the connection pool. Defaults to True.
+            http2: A boolean indicating if HTTP/2 requests should be supported by
+                the connection pool. Defaults to False.
+            retries: The maximum number of retries when trying to establish a
+                connection.
+            local_address: Local address to connect from. Can also be used to connect
+                using a particular address family. Using `local_address="0.0.0.0"`
+                will connect using an `AF_INET` address (IPv4), while using
+                `local_address="::"` will connect using an `AF_INET6` address (IPv6).
+            uds: Path to a Unix Domain Socket to use instead of TCP sockets.
+            network_backend: A backend instance to use for handling network I/O.
+            socket_options: Socket options to be applied to the underlying TCP
+                socket when connections are established.
+        """
+        self._ssl_context = ssl_context
 
-        if isinstance(backend, str):
-            backend = lookup_sync_backend(backend)
+        self._max_connections = (
+            sys.maxsize if max_connections is None else max_connections
+        )
+        self._max_keepalive_connections = (
+            sys.maxsize
+            if max_keepalive_connections is None
+            else max_keepalive_connections
+        )
+        self._max_keepalive_connections = min(
+            self._max_connections, self._max_keepalive_connections
+        )
 
-        self._ssl_context = SSLContext() if ssl_context is None else ssl_context
-        self._max_connections = max_connections
-        self._max_keepalive_connections = max_keepalive_connections
         self._keepalive_expiry = keepalive_expiry
         self._http1 = http1
         self._http2 = http2
-        self._uds = uds
-        self._local_address = local_address
         self._retries = retries
-        self._connections: Dict[Origin, Set[SyncHTTPConnection]] = {}
-        self._thread_lock = ThreadLock()
-        self._backend = backend
-        self._next_keepalive_check = 0.0
-
-        if not (http1 or http2):
-            raise ValueError("Either http1 or http2 must be True.")
-
-        if http2:
-            try:
-                import h2  # noqa: F401
-            except ImportError:
-                raise ImportError(
-                    "Attempted to use http2=True, but the 'h2' "
-                    "package is not installed. Use 'pip install httpcore[http2]'."
-                )
-
-    @property
-    def _connection_semaphore(self) -> SyncSemaphore:
-        # We do this lazily, to make sure backend autodetection always
-        # runs within an async context.
-        if not hasattr(self, "_internal_semaphore"):
-            if self._max_connections is not None:
-                self._internal_semaphore = self._backend.create_semaphore(
-                    self._max_connections, exc_class=PoolTimeout
-                )
-            else:
-                self._internal_semaphore = NullSemaphore()
-
-        return self._internal_semaphore
+        self._local_address = local_address
+        self._uds = uds
 
-    @property
-    def _connection_acquiry_lock(self) -> SyncLock:
-        if not hasattr(self, "_internal_connection_acquiry_lock"):
-            self._internal_connection_acquiry_lock = self._backend.create_lock()
-        return self._internal_connection_acquiry_lock
+        self._pool: List[ConnectionInterface] = []
+        self._requests: List[RequestStatus] = []
+        self._pool_lock = Lock()
+        self._network_backend = (
+            SyncBackend() if network_backend is None else network_backend
+        )
+        self._socket_options = socket_options
 
-    def _create_connection(
-        self,
-        origin: Tuple[bytes, bytes, int],
-    ) -> SyncHTTPConnection:
-        return SyncHTTPConnection(
+    def create_connection(self, origin: Origin) -> ConnectionInterface:
+        return HTTPConnection(
             origin=origin,
+            ssl_context=self._ssl_context,
+            keepalive_expiry=self._keepalive_expiry,
             http1=self._http1,
             http2=self._http2,
-            keepalive_expiry=self._keepalive_expiry,
-            uds=self._uds,
-            ssl_context=self._ssl_context,
-            local_address=self._local_address,
             retries=self._retries,
-            backend=self._backend,
+            local_address=self._local_address,
+            uds=self._uds,
+            network_backend=self._network_backend,
+            socket_options=self._socket_options,
         )
 
-    def handle_request(
-        self,
-        method: bytes,
-        url: URL,
-        headers: Headers,
-        stream: SyncByteStream,
-        extensions: dict,
-    ) -> Tuple[int, Headers, SyncByteStream, dict]:
-        if url[0] not in (b"http", b"https"):
-            scheme = url[0].decode("latin-1")
-            host = url[1].decode("latin-1")
-            if scheme == "":
-                raise UnsupportedProtocol(
-                    f"The request to '://{host}/' is missing either an 'http://' \
-                        or 'https://' protocol."
-                )
-            else:
-                raise UnsupportedProtocol(
-                    f"The request to '{scheme}://{host}' has \
-                        an unsupported protocol {scheme!r}"
-                )
-
-        if not url[1]:
-            raise LocalProtocolError("Missing hostname in URL.")
-
-        origin = url_to_origin(url)
-        timeout = cast(TimeoutDict, extensions.get("timeout", {}))
-
-        self._keepalive_sweep()
-
-        connection: Optional[SyncHTTPConnection] = None
-        while connection is None:
-            with self._connection_acquiry_lock:
-                # We get-or-create a connection as an atomic operation, to ensure
-                # that HTTP/2 requests issued in close concurrency will end up
-                # on the same connection.
-                logger.trace("get_connection_from_pool=%r", origin)
-                connection = self._get_connection_from_pool(origin)
-
-                if connection is None:
-                    connection = self._create_connection(origin=origin)
-                    logger.trace("created connection=%r", connection)
-                    self._add_to_pool(connection, timeout=timeout)
-                else:
-                    logger.trace("reuse connection=%r", connection)
+    @property
+    def connections(self) -> List[ConnectionInterface]:
+        """
+        Return a list of the connections currently in the pool.
+
+        For example:
+
+        ```python
+        >>> pool.connections
+        [
+            <HTTPConnection ['https://example.com:443', HTTP/1.1, ACTIVE, Request Count: 6]>,
+            <HTTPConnection ['https://example.com:443', HTTP/1.1, IDLE, Request Count: 9]>,
+            <HTTPConnection ['http://example.com:80', HTTP/1.1, IDLE, Request Count: 1]>,
+        ]
+        ```
+        """
+        return list(self._pool)
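
[Editor's note: stepping outside the diff briefly, the assembled pool is driven through the `request()`/`stream()` methods it inherits from `RequestInterface`. A minimal usage sketch:]

```python
import httpcore

# The pool transparently creates, reuses, and expires connections.
with httpcore.ConnectionPool() as pool:
    response = pool.request("GET", "https://www.example.com/")
    print(response.status, len(response.content))
```
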
- logger.trace("remove from pool connection=%r", connection) - self._remove_from_pool(connection) - raise - - status_code, headers, stream, extensions = response - wrapped_stream = ResponseByteStream( - stream, connection=connection, callback=self._response_closed - ) - return status_code, headers, wrapped_stream, extensions - - def _get_connection_from_pool( - self, origin: Origin - ) -> Optional[SyncHTTPConnection]: - # Determine expired keep alive connections on this origin. - reuse_connection = None - connections_to_close = set() - - for connection in self._connections_for_origin(origin): - if connection.should_close(): - connections_to_close.add(connection) - self._remove_from_pool(connection) - elif connection.is_available(): - reuse_connection = connection - - # Close any dropped connections. - for connection in connections_to_close: - connection.close() - - return reuse_connection - - def _response_closed(self, connection: SyncHTTPConnection) -> None: - remove_from_pool = False - close_connection = False - - if connection.is_closed(): - remove_from_pool = True - elif connection.is_idle(): - num_connections = len(self._get_all_connections()) - if ( - self._max_keepalive_connections is not None - and num_connections > self._max_keepalive_connections - ): - remove_from_pool = True - close_connection = True - - if remove_from_pool: - self._remove_from_pool(connection) - - if close_connection: - connection.close() - - def _keepalive_sweep(self) -> None: + def _attempt_to_acquire_connection(self, status: RequestStatus) -> bool: + """ + Attempt to provide a connection that can handle the given origin. + """ + origin = status.request.url.origin + + # If there are queued requests in front of us, then don't acquire a + # connection. We handle requests strictly in order. + waiting = [s for s in self._requests if s.connection is None] + if waiting and waiting[0] is not status: + return False + + # Reuse an existing connection if one is currently available. + for idx, connection in enumerate(self._pool): + if connection.can_handle_request(origin) and connection.is_available(): + self._pool.pop(idx) + self._pool.insert(0, connection) + status.set_connection(connection) + return True + + # If the pool is currently full, attempt to close one idle connection. + if len(self._pool) >= self._max_connections: + for idx, connection in reversed(list(enumerate(self._pool))): + if connection.is_idle(): + connection.close() + self._pool.pop(idx) + break + + # If the pool is still full, then we cannot acquire a connection. + if len(self._pool) >= self._max_connections: + return False + + # Otherwise create a new connection. + connection = self.create_connection(origin) + self._pool.insert(0, connection) + status.set_connection(connection) + return True + + def _close_expired_connections(self) -> None: + """ + Clean up the connection pool by closing off any connections that have expired. """ - Remove any IDLE connections that have expired past their keep-alive time. + # Close any connections that have expired their keep-alive time. + for idx, connection in reversed(list(enumerate(self._pool))): + if connection.has_expired(): + connection.close() + self._pool.pop(idx) + + # If the pool size exceeds the maximum number of allowed keep-alive connections, + # then close off idle connections as required. 
+ pool_size = len(self._pool) + for idx, connection in reversed(list(enumerate(self._pool))): + if connection.is_idle() and pool_size > self._max_keepalive_connections: + connection.close() + self._pool.pop(idx) + pool_size -= 1 + + def handle_request(self, request: Request) -> Response: """ - if self._keepalive_expiry is None: - return + Send an HTTP request, and return an HTTP response. - now = self._backend.time() - if now < self._next_keepalive_check: - return + This is the core implementation that is called into by `.request()` or `.stream()`. + """ + scheme = request.url.scheme.decode() + if scheme == "": + raise UnsupportedProtocol( + "Request URL is missing an 'http://' or 'https://' protocol." + ) + if scheme not in ("http", "https", "ws", "wss"): + raise UnsupportedProtocol( + f"Request URL has an unsupported protocol '{scheme}://'." + ) - self._next_keepalive_check = now + min(1.0, self._keepalive_expiry) - connections_to_close = set() + status = RequestStatus(request) - for connection in self._get_all_connections(): - if connection.should_close(): - connections_to_close.add(connection) - self._remove_from_pool(connection) + with self._pool_lock: + self._requests.append(status) + self._close_expired_connections() + self._attempt_to_acquire_connection(status) - for connection in connections_to_close: - connection.close() + while True: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("pool", None) + try: + connection = status.wait_for_connection(timeout=timeout) + except BaseException as exc: + # If we timeout here, or if the task is cancelled, then make + # sure to remove the request from the queue before bubbling + # up the exception. + with self._pool_lock: + # Ensure only remove when task exists. + if status in self._requests: + self._requests.remove(status) + raise exc - def _add_to_pool( - self, connection: SyncHTTPConnection, timeout: TimeoutDict - ) -> None: - logger.trace("adding connection to pool=%r", connection) - self._connection_semaphore.acquire(timeout=timeout.get("pool", None)) - with self._thread_lock: - self._connections.setdefault(connection.origin, set()) - self._connections[connection.origin].add(connection) - - def _remove_from_pool(self, connection: SyncHTTPConnection) -> None: - logger.trace("removing connection from pool=%r", connection) - with self._thread_lock: - if connection in self._connections.get(connection.origin, set()): - self._connection_semaphore.release() - self._connections[connection.origin].remove(connection) - if not self._connections[connection.origin]: - del self._connections[connection.origin] - - def _connections_for_origin(self, origin: Origin) -> Set[SyncHTTPConnection]: - return set(self._connections.get(origin, set())) - - def _get_all_connections(self) -> Set[SyncHTTPConnection]: - connections: Set[SyncHTTPConnection] = set() - for connection_set in self._connections.values(): - connections |= connection_set - return connections + try: + response = connection.handle_request(request) + except ConnectionNotAvailable: + # The ConnectionNotAvailable exception is a special case, that + # indicates we need to retry the request on a new connection. + # + # The most common case where this can occur is when multiple + # requests are queued waiting for a single connection, which + # might end up as an HTTP/2 connection, but which actually ends + # up as HTTP/1.1. + with self._pool_lock: + # Maintain our position in the request queue, but reset the + # status so that the request becomes queued again. 
+ status.unset_connection() + self._attempt_to_acquire_connection(status) + except BaseException as exc: + with ShieldCancellation(): + self.response_closed(status) + raise exc + else: + break + + # When we return the response, we wrap the stream in a special class + # that handles notifying the connection pool once the response + # has been released. + assert isinstance(response.stream, Iterable) + return Response( + status=response.status, + headers=response.headers, + content=ConnectionPoolByteStream(response.stream, self, status), + extensions=response.extensions, + ) - def close(self) -> None: - connections = self._get_all_connections() - for connection in connections: - self._remove_from_pool(connection) + def response_closed(self, status: RequestStatus) -> None: + """ + This method acts as a callback once the request/response cycle is complete. - # Close all connections - for connection in connections: - connection.close() + It is called into from the `ConnectionPoolByteStream.close()` method. + """ + assert status.connection is not None + connection = status.connection + + with self._pool_lock: + # Update the state of the connection pool. + if status in self._requests: + self._requests.remove(status) + + if connection.is_closed() and connection in self._pool: + self._pool.remove(connection) + + # Since we've had a response closed, it's possible we'll now be able + # to service one or more requests that are currently pending. + for status in self._requests: + if status.connection is None: + acquired = self._attempt_to_acquire_connection(status) + # If we could not acquire a connection for a queued request + # then we don't need to check anymore requests that are + # queued later behind it. + if not acquired: + break + + # Housekeeping. + self._close_expired_connections() - def get_connection_info(self) -> Dict[str, List[str]]: + def close(self) -> None: """ - Returns a dict of origin URLs to a list of summary strings for each connection. + Close any connections in the pool. """ - self._keepalive_sweep() + with self._pool_lock: + for connection in self._pool: + connection.close() + self._pool = [] + self._requests = [] - stats = {} - for origin, connections in self._connections.items(): - stats[origin_to_url_string(origin)] = sorted( - [connection.info() for connection in connections] - ) - return stats + def __enter__(self) -> "ConnectionPool": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self.close() + + +class ConnectionPoolByteStream: + """ + A wrapper around the response byte stream, that additionally handles + notifying the connection pool when the response has been closed. 
+ """ + + def __init__( + self, + stream: Iterable[bytes], + pool: ConnectionPool, + status: RequestStatus, + ) -> None: + self._stream = stream + self._pool = pool + self._status = status + + def __iter__(self) -> Iterator[bytes]: + for part in self._stream: + yield part + + def close(self) -> None: + try: + if hasattr(self._stream, "close"): + self._stream.close() + finally: + with ShieldCancellation(): + self._pool.response_closed(self._status) diff --git a/packages/httpcore/_sync/http.py b/packages/httpcore/_sync/http.py deleted file mode 100644 index c128a96b2..000000000 --- a/packages/httpcore/_sync/http.py +++ /dev/null @@ -1,42 +0,0 @@ -from ssl import SSLContext - -from .._backends.sync import SyncSocketStream -from .._types import TimeoutDict -from .base import SyncHTTPTransport - - -class SyncBaseHTTPConnection(SyncHTTPTransport): - def info(self) -> str: - raise NotImplementedError() # pragma: nocover - - def should_close(self) -> bool: - """ - Return `True` if the connection is in a state where it should be closed. - """ - raise NotImplementedError() # pragma: nocover - - def is_idle(self) -> bool: - """ - Return `True` if the connection is currently idle. - """ - raise NotImplementedError() # pragma: nocover - - def is_closed(self) -> bool: - """ - Return `True` if the connection has been closed. - """ - raise NotImplementedError() # pragma: nocover - - def is_available(self) -> bool: - """ - Return `True` if the connection is currently able to accept an outgoing request. - """ - raise NotImplementedError() # pragma: nocover - - def start_tls( - self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None - ) -> SyncSocketStream: - """ - Upgrade the underlying socket to TLS. - """ - raise NotImplementedError() # pragma: nocover diff --git a/packages/httpcore/_sync/http11.py b/packages/httpcore/_sync/http11.py index 5dbb42e02..0cc100e3f 100644 --- a/packages/httpcore/_sync/http11.py +++ b/packages/httpcore/_sync/http11.py @@ -1,194 +1,186 @@ import enum +import logging import time -from ssl import SSLContext -from typing import Iterator, List, Optional, Tuple, Union, cast +from types import TracebackType +from typing import ( + Iterable, + Iterator, + List, + Optional, + Tuple, + Type, + Union, + cast, +) import h11 -from .._backends.sync import SyncSocketStream -from .._bytestreams import IteratorByteStream -from .._exceptions import LocalProtocolError, RemoteProtocolError, map_exceptions -from .._types import URL, Headers, TimeoutDict -from .._utils import get_logger -from .base import SyncByteStream, NewConnectionRequired -from .http import SyncBaseHTTPConnection +from .._backends.base import NetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, + WriteError, + map_exceptions, +) +from .._models import Origin, Request, Response +from .._synchronization import Lock, ShieldCancellation +from .._trace import Trace +from .interfaces import ConnectionInterface -H11Event = Union[ +logger = logging.getLogger("httpcore.http11") + + +# A subset of `h11.Event` types supported by `_send_event` +H11SendEvent = Union[ h11.Request, - h11.Response, - h11.InformationalResponse, h11.Data, h11.EndOfMessage, - h11.ConnectionClosed, ] -class ConnectionState(enum.IntEnum): +class HTTPConnectionState(enum.IntEnum): NEW = 0 ACTIVE = 1 IDLE = 2 CLOSED = 3 -logger = get_logger(__name__) - - -class SyncHTTP11Connection(SyncBaseHTTPConnection): +class HTTP11Connection(ConnectionInterface): READ_NUM_BYTES = 64 * 1024 + 
MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 - def __init__(self, socket: SyncSocketStream, keepalive_expiry: float = None): - self.socket = socket - + def __init__( + self, + origin: Origin, + stream: NetworkStream, + keepalive_expiry: Optional[float] = None, + ) -> None: + self._origin = origin + self._network_stream = stream self._keepalive_expiry: Optional[float] = keepalive_expiry - self._should_expire_at: Optional[float] = None - self._h11_state = h11.Connection(our_role=h11.CLIENT) - self._state = ConnectionState.NEW - - def __repr__(self) -> str: - return f"" - - def _now(self) -> float: - return time.monotonic() - - def _server_disconnected(self) -> bool: - """ - Return True if the connection is idle, and the underlying socket is readable. - The only valid state the socket can be readable here is when the b"" - EOF marker is about to be returned, indicating a server disconnect. - """ - return self._state == ConnectionState.IDLE and self.socket.is_readable() - - def _keepalive_expired(self) -> bool: - """ - Return True if the connection is idle, and has passed it's keepalive - expiry time. - """ - return ( - self._state == ConnectionState.IDLE - and self._should_expire_at is not None - and self._now() >= self._should_expire_at + self._expire_at: Optional[float] = None + self._state = HTTPConnectionState.NEW + self._state_lock = Lock() + self._request_count = 0 + self._h11_state = h11.Connection( + our_role=h11.CLIENT, + max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE, ) - def info(self) -> str: - return f"HTTP/1.1, {self._state.name}" - - def should_close(self) -> bool: - """ - Return `True` if the connection is in a state where it should be closed. - """ - return self._server_disconnected() or self._keepalive_expired() - - def is_idle(self) -> bool: - """ - Return `True` if the connection is currently idle. - """ - return self._state == ConnectionState.IDLE - - def is_closed(self) -> bool: - """ - Return `True` if the connection has been closed. - """ - return self._state == ConnectionState.CLOSED - - def is_available(self) -> bool: - """ - Return `True` if the connection is currently able to accept an outgoing request. - """ - return self._state == ConnectionState.IDLE + def handle_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + with self._state_lock: + if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE): + self._request_count += 1 + self._state = HTTPConnectionState.ACTIVE + self._expire_at = None + else: + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request} + try: + with Trace( + "send_request_headers", logger, request, kwargs + ) as trace: + self._send_request_headers(**kwargs) + with Trace("send_request_body", logger, request, kwargs) as trace: + self._send_request_body(**kwargs) + except WriteError: + # If we get a write error while we're writing the request, + # then we supress this error and move on to attempting to + # read the response. Servers can sometimes close the request + # pre-emptively and then respond with a well formed HTTP + # error response. 
+ pass + + with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + ( + http_version, + status, + reason_phrase, + headers, + ) = self._receive_response_headers(**kwargs) + trace.return_value = ( + http_version, + status, + reason_phrase, + headers, + ) + + return Response( + status=status, + headers=headers, + content=HTTP11ConnectionByteStream(self, request), + extensions={ + "http_version": http_version, + "reason_phrase": reason_phrase, + "network_stream": self._network_stream, + }, + ) + except BaseException as exc: + with ShieldCancellation(): + with Trace("response_closed", logger, request) as trace: + self._response_closed() + raise exc + + # Sending the request... + + def _send_request_headers(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) - def handle_request( - self, - method: bytes, - url: URL, - headers: Headers, - stream: SyncByteStream, - extensions: dict, - ) -> Tuple[int, Headers, SyncByteStream, dict]: - """ - Send a single HTTP/1.1 request. - - Note that there is no kind of task/thread locking at this layer of interface. - Dealing with locking for concurrency is handled by the `SyncHTTPConnection`. - """ - timeout = cast(TimeoutDict, extensions.get("timeout", {})) - - if self._state in (ConnectionState.NEW, ConnectionState.IDLE): - self._state = ConnectionState.ACTIVE - self._should_expire_at = None - else: - raise NewConnectionRequired() - - self._send_request(method, url, headers, timeout) - self._send_request_body(stream, timeout) - ( - http_version, - status_code, - reason_phrase, - headers, - ) = self._receive_response(timeout) - response_stream = IteratorByteStream( - iterator=self._receive_response_data(timeout), - close_func=self._response_closed, - ) - extensions = { - "http_version": http_version, - "reason_phrase": reason_phrase, - } - return (status_code, headers, response_stream, extensions) - - def start_tls( - self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None - ) -> SyncSocketStream: - timeout = {} if timeout is None else timeout - self.socket = self.socket.start_tls(hostname, ssl_context, timeout) - return self.socket - - def _send_request( - self, method: bytes, url: URL, headers: Headers, timeout: TimeoutDict - ) -> None: - """ - Send the request line and headers. - """ - logger.trace("send_request method=%r url=%r headers=%s", method, url, headers) - _scheme, _host, _port, target = url with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): - event = h11.Request(method=method, target=target, headers=headers) - self._send_event(event, timeout) - - def _send_request_body( - self, stream: SyncByteStream, timeout: TimeoutDict - ) -> None: - """ - Send the request body. - """ - # Send the request body. - for chunk in stream: - logger.trace("send_data=Data(<%d bytes>)", len(chunk)) + event = h11.Request( + method=request.method, + target=request.url.target, + headers=request.headers, + ) + self._send_event(event, timeout=timeout) + + def _send_request_body(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + assert isinstance(request.stream, Iterable) + for chunk in request.stream: event = h11.Data(data=chunk) - self._send_event(event, timeout) + self._send_event(event, timeout=timeout) - # Finalize sending the request. 
- event = h11.EndOfMessage() - self._send_event(event, timeout) + self._send_event(h11.EndOfMessage(), timeout=timeout) - def _send_event(self, event: H11Event, timeout: TimeoutDict) -> None: - """ - Send a single `h11` event to the network, waiting for the data to - drain before returning. - """ + def _send_event( + self, event: h11.Event, timeout: Optional[float] = None + ) -> None: bytes_to_send = self._h11_state.send(event) - self.socket.write(bytes_to_send, timeout) + if bytes_to_send is not None: + self._network_stream.write(bytes_to_send, timeout=timeout) - def _receive_response( - self, timeout: TimeoutDict + # Receiving the response... + + def _receive_response_headers( + self, request: Request ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]]]: - """ - Read the response status and headers from the network. - """ + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + while True: - event = self._receive_event(timeout) + event = self._receive_event(timeout=timeout) if isinstance(event, h11.Response): break + if ( + isinstance(event, h11.InformationalResponse) + and event.status_code == 101 + ): + break http_version = b"HTTP/" + event.http_version @@ -198,31 +190,28 @@ def _receive_response( return http_version, event.status_code, event.reason, headers - def _receive_response_data( - self, timeout: TimeoutDict - ) -> Iterator[bytes]: - """ - Read the response data from the network. - """ + def _receive_response_body(self, request: Request) -> Iterator[bytes]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + while True: - event = self._receive_event(timeout) + event = self._receive_event(timeout=timeout) if isinstance(event, h11.Data): - logger.trace("receive_event=Data(<%d bytes>)", len(event.data)) yield bytes(event.data) elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): - logger.trace("receive_event=%r", event) break - def _receive_event(self, timeout: TimeoutDict) -> H11Event: - """ - Read a single `h11` event, reading more data from the network if needed. - """ + def _receive_event( + self, timeout: Optional[float] = None + ) -> Union[h11.Event, Type[h11.PAUSED]]: while True: with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): event = self._h11_state.next_event() if event is h11.NEED_DATA: - data = self.socket.read(self.READ_NUM_BYTES, timeout) + data = self._network_stream.read( + self.READ_NUM_BYTES, timeout=timeout + ) # If we feed this case through h11 we'll raise an exception like: # @@ -230,40 +219,125 @@ def _receive_event(self, timeout: TimeoutDict) -> H11Event: # ConnectionClosed when role=SERVER and state=SEND_RESPONSE # # Which is accurate, but not very informative from an end-user - # perspective. Instead we handle messaging for this case distinctly. + # perspective. Instead we handle this case distinctly and treat + # it as a ConnectError. if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: msg = "Server disconnected without sending a response." 
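
[Editor's note: the `_send_event`/`_receive_event` wrappers here are thin shims over h11's sans-IO state machine; all the actual HTTP/1.1 parsing lives in h11. A sketch of the underlying h11 calls these methods delegate to, illustration only.]

```python
import h11

conn = h11.Connection(our_role=h11.CLIENT)

# _send_event boils down to: serialize an event, then write the bytes.
data = conn.send(h11.Request(method="GET", target="/", headers=[("Host", "example.com")]))
data += conn.send(h11.EndOfMessage())
# `data` now holds the serialized request, ready for the network stream.

# _receive_event boils down to: feed bytes in, pull parsed events out.
conn.receive_data(b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
event = conn.next_event()  # h11.Response(status_code=200, ...)
```
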
raise RemoteProtocolError(msg)
self._h11_state.receive_data(data)
else:
- assert event is not h11.NEED_DATA
- break
- return event
+ # mypy fails to narrow the type in the above if statement
+ return cast(Union[h11.Event, Type[h11.PAUSED]], event)

def _response_closed(self) -> None:
- logger.trace(
- "response_closed our_state=%r their_state=%r",
- self._h11_state.our_state,
- self._h11_state.their_state,
- )
- if (
- self._h11_state.our_state is h11.DONE
- and self._h11_state.their_state is h11.DONE
- ):
- self._h11_state.start_next_cycle()
- self._state = ConnectionState.IDLE
- if self._keepalive_expiry is not None:
- self._should_expire_at = self._now() + self._keepalive_expiry
- else:
- self.close()
+ with self._state_lock:
+ if (
+ self._h11_state.our_state is h11.DONE
+ and self._h11_state.their_state is h11.DONE
+ ):
+ self._state = HTTPConnectionState.IDLE
+ self._h11_state.start_next_cycle()
+ if self._keepalive_expiry is not None:
+ now = time.monotonic()
+ self._expire_at = now + self._keepalive_expiry
+ else:
+ self.close()
+
+ # Once the connection is no longer required...

def close(self) -> None:
- if self._state != ConnectionState.CLOSED:
- self._state = ConnectionState.CLOSED
+ # Note that this method unilaterally closes the connection, and does
+ # not have any kind of locking in place around it.
+ self._state = HTTPConnectionState.CLOSED
+ self._network_stream.close()
+
+ # The ConnectionInterface methods provide information about the state of
+ # the connection, allowing for a connection pooling implementation to
+ # determine when to reuse and when to close the connection...
+
+ def can_handle_request(self, origin: Origin) -> bool:
+ return origin == self._origin
+
+ def is_available(self) -> bool:
+ # Note that HTTP/1.1 connections in the "NEW" state are not treated as
+ # being "available". The control flow which created the connection will
+ # be able to send an outgoing request, but the connection will not be
+ # acquired from the connection pool for any other request.
+ return self._state == HTTPConnectionState.IDLE
+
+ def has_expired(self) -> bool:
+ now = time.monotonic()
+ keepalive_expired = self._expire_at is not None and now > self._expire_at
+
+ # If the HTTP connection is idle but the socket is readable, then the
+ # only valid state is that the socket is about to return b"", indicating
+ # a server-initiated disconnect.
+ server_disconnected = (
+ self._state == HTTPConnectionState.IDLE
+ and self._network_stream.get_extra_info("is_readable")
+ )
+
+ return keepalive_expired or server_disconnected
+
+ def is_idle(self) -> bool:
+ return self._state == HTTPConnectionState.IDLE
+
+ def is_closed(self) -> bool:
+ return self._state == HTTPConnectionState.CLOSED
+
+ def info(self) -> str:
+ origin = str(self._origin)
+ return (
+ f"{origin!r}, HTTP/1.1, {self._state.name}, "
+ f"Request Count: {self._request_count}"
+ )
+
+ def __repr__(self) -> str:
+ class_name = self.__class__.__name__
+ origin = str(self._origin)
+ return (
+ f"<{class_name} [{origin!r}, {self._state.name}, "
+ f"Request Count: {self._request_count}]>"
+ )

- if self._h11_state.our_state is h11.MUST_CLOSE:
- event = h11.ConnectionClosed()
- self._h11_state.send(event)
+ # These context managers are not used in the standard flow, but are
+ # useful for testing or working with connection instances directly.
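For example, driving the connection directly might look like the following sketch (a hedged illustration, not part of this diff; it assumes the vendored package is importable as `httpcore`, and that `connect_tcp` takes the host/port/timeout arguments used elsewhere in this patch):

```python
from httpcore._backends.sync import SyncBackend
from httpcore._models import Origin
from httpcore._sync.http11 import HTTP11Connection

# Hypothetical direct usage, outside of any connection pool.
origin = Origin(scheme=b"http", host=b"example.com", port=80)
stream = SyncBackend().connect_tcp("example.com", 80, timeout=10.0)

with HTTP11Connection(origin=origin, stream=stream) as connection:
    # request() comes from the RequestInterface base class defined in
    # interfaces.py later in this diff.
    response = connection.request("GET", "http://example.com/")
    print(response.status, connection.info())
```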
- self.socket.close() + def __enter__(self) -> "HTTP11Connection": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self.close() + + +class HTTP11ConnectionByteStream: + def __init__(self, connection: HTTP11Connection, request: Request) -> None: + self._connection = connection + self._request = request + self._closed = False + + def __iter__(self) -> Iterator[bytes]: + kwargs = {"request": self._request} + try: + with Trace("receive_response_body", logger, self._request, kwargs): + for chunk in self._connection._receive_response_body(**kwargs): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with ShieldCancellation(): + self.close() + raise exc + + def close(self) -> None: + if not self._closed: + self._closed = True + with Trace("response_closed", logger, self._request): + self._connection._response_closed() diff --git a/packages/httpcore/_sync/http2.py b/packages/httpcore/_sync/http2.py index 90caf5faf..d141d459a 100644 --- a/packages/httpcore/_sync/http2.py +++ b/packages/httpcore/_sync/http2.py @@ -1,175 +1,190 @@ import enum +import logging import time -from ssl import SSLContext -from typing import Iterator, Dict, List, Optional, Tuple, cast +import types +import typing +import h2.config import h2.connection import h2.events -from h2.config import H2Configuration -from h2.exceptions import NoAvailableStreamIDError -from h2.settings import SettingCodes, Settings +import h2.exceptions +import h2.settings -from .._backends.sync import SyncBackend, SyncLock, SyncSemaphore, SyncSocketStream -from .._bytestreams import IteratorByteStream -from .._exceptions import LocalProtocolError, PoolTimeout, RemoteProtocolError -from .._types import URL, Headers, TimeoutDict -from .._utils import get_logger -from .base import SyncByteStream, NewConnectionRequired -from .http import SyncBaseHTTPConnection +from .._backends.base import NetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, +) +from .._models import Origin, Request, Response +from .._synchronization import Lock, Semaphore, ShieldCancellation +from .._trace import Trace +from .interfaces import ConnectionInterface -logger = get_logger(__name__) +logger = logging.getLogger("httpcore.http2") -class ConnectionState(enum.IntEnum): - IDLE = 0 +def has_body_headers(request: Request) -> bool: + return any( + k.lower() == b"content-length" or k.lower() == b"transfer-encoding" + for k, v in request.headers + ) + + +class HTTPConnectionState(enum.IntEnum): ACTIVE = 1 - CLOSED = 2 + IDLE = 2 + CLOSED = 3 -class SyncHTTP2Connection(SyncBaseHTTPConnection): +class HTTP2Connection(ConnectionInterface): READ_NUM_BYTES = 64 * 1024 - CONFIG = H2Configuration(validate_inbound_headers=False) + CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) def __init__( self, - socket: SyncSocketStream, - backend: SyncBackend, - keepalive_expiry: float = None, + origin: Origin, + stream: NetworkStream, + keepalive_expiry: typing.Optional[float] = None, ): - self.socket = socket - - self._backend = backend + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: typing.Optional[float] = keepalive_expiry self._h2_state = h2.connection.H2Connection(config=self.CONFIG) - + 
self._state = HTTPConnectionState.IDLE
+ self._expire_at: typing.Optional[float] = None
+ self._request_count = 0
+ self._init_lock = Lock()
+ self._state_lock = Lock()
+ self._read_lock = Lock()
+ self._write_lock = Lock()
self._sent_connection_init = False
- self._streams: Dict[int, SyncHTTP2Stream] = {}
- self._events: Dict[int, List[h2.events.Event]] = {}
-
- self._keepalive_expiry: Optional[float] = keepalive_expiry
- self._should_expire_at: Optional[float] = None
- self._state = ConnectionState.ACTIVE
- self._exhausted_available_stream_ids = False
-
- def __repr__(self) -> str:
- return f"<SyncHTTP2Connection [{self.info()}]>"
-
- def info(self) -> str:
- return f"HTTP/2, {self._state.name}, {len(self._streams)} streams"
-
- def _now(self) -> float:
- return time.monotonic()
+ self._used_all_stream_ids = False
+ self._connection_error = False
+
+ # Mapping from stream ID to response stream events.
+ self._events: typing.Dict[
+ int,
+ typing.List[
+ typing.Union[
+ h2.events.ResponseReceived,
+ h2.events.DataReceived,
+ h2.events.StreamEnded,
+ h2.events.StreamReset,
+ ]
+ ],
+ ] = {}
+
+ # Connection terminated events are stored as state since
+ # we need to handle them for all streams.
+ self._connection_terminated: typing.Optional[
+ h2.events.ConnectionTerminated
+ ] = None
+
+ self._read_exception: typing.Optional[Exception] = None
+ self._write_exception: typing.Optional[Exception] = None
+
+ def handle_request(self, request: Request) -> Response:
+ if not self.can_handle_request(request.url.origin):
+ # This cannot occur in normal operation, since the connection pool
+ # will only send requests on connections that handle them.
+ # It's in place simply for resilience as a guard against incorrect
+ # usage, for anyone working directly with httpcore connections.
+ raise RuntimeError(
+ f"Attempted to send request to {request.url.origin} on connection "
+ f"to {self._origin}"
+ )

- def should_close(self) -> bool:
- """
- Return `True` if the connection is currently idle, and the keepalive
- timeout has passed.
- """
- return (
- self._state == ConnectionState.IDLE
- and self._should_expire_at is not None
- and self._now() >= self._should_expire_at
- )
+ with self._state_lock:
+ if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE):
+ self._request_count += 1
+ self._expire_at = None
+ self._state = HTTPConnectionState.ACTIVE
+ else:
+ raise ConnectionNotAvailable()

- def is_idle(self) -> bool:
- """
- Return `True` if the connection is currently idle.
- """
- return self._state == ConnectionState.IDLE
+ with self._init_lock:
+ if not self._sent_connection_init:
+ try:
+ kwargs = {"request": request}
+ with Trace("send_connection_init", logger, request, kwargs):
+ self._send_connection_init(**kwargs)
+ except BaseException as exc:
+ with ShieldCancellation():
+ self.close()
+ raise exc

- def is_closed(self) -> bool:
- """
- Return `True` if the connection has been closed.
- """
- return self._state == ConnectionState.CLOSED
+ self._sent_connection_init = True

- def is_available(self) -> bool:
- """
- Return `True` if the connection is currently able to accept an outgoing request.
- This occurs when any of the following occur:
-
- * The connection has not yet been opened, and HTTP/2 support is enabled.
- We don't *know* at this point if we'll end up on an HTTP/2 connection or
- not, but we *might* do, so we indicate availability.
- * The connection has been opened, and is currently idle.
- * The connection is open, and is an HTTP/2 connection.
The connection must - also not have exhausted the maximum total number of stream IDs. - """ - return ( - self._state != ConnectionState.CLOSED - and not self._exhausted_available_stream_ids - ) + # Initially start with just 1 until the remote server provides + # its max_concurrent_streams value + self._max_streams = 1 - @property - def init_lock(self) -> SyncLock: - # We do this lazily, to make sure backend autodetection always - # runs within an async context. - if not hasattr(self, "_initialization_lock"): - self._initialization_lock = self._backend.create_lock() - return self._initialization_lock - - @property - def read_lock(self) -> SyncLock: - # We do this lazily, to make sure backend autodetection always - # runs within an async context. - if not hasattr(self, "_read_lock"): - self._read_lock = self._backend.create_lock() - return self._read_lock - - @property - def max_streams_semaphore(self) -> SyncSemaphore: - # We do this lazily, to make sure backend autodetection always - # runs within an async context. - if not hasattr(self, "_max_streams_semaphore"): - max_streams = self._h2_state.local_settings.max_concurrent_streams - self._max_streams_semaphore = self._backend.create_semaphore( - max_streams, exc_class=PoolTimeout - ) - return self._max_streams_semaphore + local_settings_max_streams = ( + self._h2_state.local_settings.max_concurrent_streams + ) + self._max_streams_semaphore = Semaphore(local_settings_max_streams) - def start_tls( - self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None - ) -> SyncSocketStream: - raise NotImplementedError("TLS upgrade not supported on HTTP/2 connections.") + for _ in range(local_settings_max_streams - self._max_streams): + self._max_streams_semaphore.acquire() - def handle_request( - self, - method: bytes, - url: URL, - headers: Headers, - stream: SyncByteStream, - extensions: dict, - ) -> Tuple[int, Headers, SyncByteStream, dict]: - timeout = cast(TimeoutDict, extensions.get("timeout", {})) - - with self.init_lock: - if not self._sent_connection_init: - # The very first stream is responsible for initiating the connection. 
- self._state = ConnectionState.ACTIVE - self.send_connection_init(timeout) - self._sent_connection_init = True + self._max_streams_semaphore.acquire() - self.max_streams_semaphore.acquire() try: - try: - stream_id = self._h2_state.get_next_available_stream_id() - except NoAvailableStreamIDError: - self._exhausted_available_stream_ids = True - raise NewConnectionRequired() - else: - self._state = ConnectionState.ACTIVE - self._should_expire_at = None - - h2_stream = SyncHTTP2Stream(stream_id=stream_id, connection=self) - self._streams[stream_id] = h2_stream + stream_id = self._h2_state.get_next_available_stream_id() self._events[stream_id] = [] - return h2_stream.handle_request( - method, url, headers, stream, extensions - ) - except Exception: # noqa: PIE786 - self.max_streams_semaphore.release() - raise + except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover + self._used_all_stream_ids = True + self._request_count -= 1 + raise ConnectionNotAvailable() - def send_connection_init(self, timeout: TimeoutDict) -> None: + try: + kwargs = {"request": request, "stream_id": stream_id} + with Trace("send_request_headers", logger, request, kwargs): + self._send_request_headers(request=request, stream_id=stream_id) + with Trace("send_request_body", logger, request, kwargs): + self._send_request_body(request=request, stream_id=stream_id) + with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + status, headers = self._receive_response( + request=request, stream_id=stream_id + ) + trace.return_value = (status, headers) + + return Response( + status=status, + headers=headers, + content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), + extensions={ + "http_version": b"HTTP/2", + "network_stream": self._network_stream, + "stream_id": stream_id, + }, + ) + except BaseException as exc: # noqa: PIE786 + with ShieldCancellation(): + kwargs = {"stream_id": stream_id} + with Trace("response_closed", logger, request, kwargs): + self._response_closed(stream_id=stream_id) + + if isinstance(exc, h2.exceptions.ProtocolError): + # One case where h2 can raise a protocol error is when a + # closed frame has been seen by the state machine. + # + # This happens when one stream is reading, and encounters + # a GOAWAY event. Other flows of control may then raise + # a protocol error at any point they interact with the 'h2_state'. + # + # In this case we'll have stored the event, and should raise + # it as a RemoteProtocolError. + if self._connection_terminated: # pragma: nocover + raise RemoteProtocolError(self._connection_terminated) + # If h2 raises a protocol error in some other state then we + # must somehow have made a protocol violation. + raise LocalProtocolError(exc) # pragma: nocover + + raise exc + + def _send_connection_init(self, request: Request) -> None: """ The HTTP/2 connection requires some initial setup before we can start using individual request/response streams on it. @@ -177,15 +192,15 @@ def send_connection_init(self, timeout: TimeoutDict) -> None: # Need to set these manually here instead of manipulating via # __setitem__() otherwise the H2Connection will emit SettingsUpdate # frames in addition to sending the undesired defaults. - self._h2_state.local_settings = Settings( + self._h2_state.local_settings = h2.settings.Settings( client=True, initial_values={ # Disable PUSH_PROMISE frames from the server since we don't do anything # with them for now. Maybe when we support caching? 
- SettingCodes.ENABLE_PUSH: 0, + h2.settings.SettingCodes.ENABLE_PUSH: 0, # These two are taken from h2 for safe defaults - SettingCodes.MAX_CONCURRENT_STREAMS: 100, - SettingCodes.MAX_HEADER_LIST_SIZE: 65536, + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, + h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, }, ) @@ -196,227 +211,85 @@ def send_connection_init(self, timeout: TimeoutDict) -> None: h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL ] - logger.trace("initiate_connection=%r", self) self._h2_state.initiate_connection() - self._h2_state.increment_flow_control_window(2 ** 24) - data_to_send = self._h2_state.data_to_send() - self.socket.write(data_to_send, timeout) - - def is_socket_readable(self) -> bool: - return self.socket.is_readable() - - def close(self) -> None: - logger.trace("close_connection=%r", self) - if self._state != ConnectionState.CLOSED: - self._state = ConnectionState.CLOSED - - self.socket.close() - - def wait_for_outgoing_flow(self, stream_id: int, timeout: TimeoutDict) -> int: - """ - Returns the maximum allowable outgoing flow for a given stream. - If the allowable flow is zero, then waits on the network until - WindowUpdated frames have increased the flow rate. - https://tools.ietf.org/html/rfc7540#section-6.9 - """ - local_flow = self._h2_state.local_flow_control_window(stream_id) - connection_flow = self._h2_state.max_outbound_frame_size - flow = min(local_flow, connection_flow) - while flow == 0: - self.receive_events(timeout) - local_flow = self._h2_state.local_flow_control_window(stream_id) - connection_flow = self._h2_state.max_outbound_frame_size - flow = min(local_flow, connection_flow) - return flow + self._h2_state.increment_flow_control_window(2**24) + self._write_outgoing_data(request) - def wait_for_event( - self, stream_id: int, timeout: TimeoutDict - ) -> h2.events.Event: - """ - Returns the next event for a given stream. - If no events are available yet, then waits on the network until - an event is available. - """ - with self.read_lock: - while not self._events[stream_id]: - self.receive_events(timeout) - return self._events[stream_id].pop(0) + # Sending the request... - def receive_events(self, timeout: TimeoutDict) -> None: + def _send_request_headers(self, request: Request, stream_id: int) -> None: """ - Read some data from the network, and update the H2 state. + Send the request headers to a given stream ID. 
""" - data = self.socket.read(self.READ_NUM_BYTES, timeout) - if data == b"": - raise RemoteProtocolError("Server disconnected") - - events = self._h2_state.receive_data(data) - for event in events: - event_stream_id = getattr(event, "stream_id", 0) - logger.trace("receive_event stream_id=%r event=%s", event_stream_id, event) - - if hasattr(event, "error_code"): - raise RemoteProtocolError(event) - - if event_stream_id in self._events: - self._events[event_stream_id].append(event) - - data_to_send = self._h2_state.data_to_send() - self.socket.write(data_to_send, timeout) - - def send_headers( - self, stream_id: int, headers: Headers, end_stream: bool, timeout: TimeoutDict - ) -> None: - logger.trace("send_headers stream_id=%r headers=%r", stream_id, headers) - self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) - self._h2_state.increment_flow_control_window(2 ** 24, stream_id=stream_id) - data_to_send = self._h2_state.data_to_send() - self.socket.write(data_to_send, timeout) - - def send_data( - self, stream_id: int, chunk: bytes, timeout: TimeoutDict - ) -> None: - logger.trace("send_data stream_id=%r chunk=%r", stream_id, chunk) - self._h2_state.send_data(stream_id, chunk) - data_to_send = self._h2_state.data_to_send() - self.socket.write(data_to_send, timeout) - - def end_stream(self, stream_id: int, timeout: TimeoutDict) -> None: - logger.trace("end_stream stream_id=%r", stream_id) - self._h2_state.end_stream(stream_id) - data_to_send = self._h2_state.data_to_send() - self.socket.write(data_to_send, timeout) - - def acknowledge_received_data( - self, stream_id: int, amount: int, timeout: TimeoutDict - ) -> None: - self._h2_state.acknowledge_received_data(amount, stream_id) - data_to_send = self._h2_state.data_to_send() - self.socket.write(data_to_send, timeout) - - def close_stream(self, stream_id: int) -> None: - try: - logger.trace("close_stream stream_id=%r", stream_id) - del self._streams[stream_id] - del self._events[stream_id] - - if not self._streams: - if self._state == ConnectionState.ACTIVE: - if self._exhausted_available_stream_ids: - self.close() - else: - self._state = ConnectionState.IDLE - if self._keepalive_expiry is not None: - self._should_expire_at = ( - self._now() + self._keepalive_expiry - ) - finally: - self.max_streams_semaphore.release() - - -class SyncHTTP2Stream: - def __init__(self, stream_id: int, connection: SyncHTTP2Connection) -> None: - self.stream_id = stream_id - self.connection = connection - - def handle_request( - self, - method: bytes, - url: URL, - headers: Headers, - stream: SyncByteStream, - extensions: dict, - ) -> Tuple[int, Headers, SyncByteStream, dict]: - headers = [(k.lower(), v) for (k, v) in headers] - timeout = cast(TimeoutDict, extensions.get("timeout", {})) - - # Send the request. - seen_headers = set(key for key, value in headers) - has_body = ( - b"content-length" in seen_headers or b"transfer-encoding" in seen_headers - ) - - self.send_headers(method, url, headers, has_body, timeout) - if has_body: - self.send_body(stream, timeout) - - # Receive the response. 
- status_code, headers = self.receive_response(timeout)
- response_stream = IteratorByteStream(
- iterator=self.body_iter(timeout), close_func=self._response_closed
- )
-
- extensions = {
- "http_version": b"HTTP/2",
- }
- return (status_code, headers, response_stream, extensions)
-
- def send_headers(
- self,
- method: bytes,
- url: URL,
- headers: Headers,
- has_body: bool,
- timeout: TimeoutDict,
- ) -> None:
- scheme, hostname, port, path = url
+ end_stream = not has_body_headers(request)

# In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'.
# In order to gracefully handle HTTP/1.1 and HTTP/2 we always require
# HTTP/1.1 style headers, and map them appropriately if we end up on
# an HTTP/2 connection.
- authority = None
-
- for k, v in headers:
- if k == b"host":
- authority = v
- break
-
- if authority is None:
- # Mirror the same error we'd see with `h11`, so that the behaviour
- # is consistent. Although we're dealing with an `:authority`
- # pseudo-header by this point, from an end-user perspective the issue
- # is that the outgoing request needed to include a `host` header.
- raise LocalProtocolError("Missing mandatory Host: header")
+ authority = [v for k, v in request.headers if k.lower() == b"host"][0]

headers = [
- (b":method", method),
+ (b":method", request.method),
(b":authority", authority),
- (b":scheme", scheme),
- (b":path", path),
+ (b":scheme", request.url.scheme),
+ (b":path", request.url.target),
] + [
- (k, v)
- for k, v in headers
- if k
+ (k.lower(), v)
+ for k, v in request.headers
+ if k.lower()
not in (
b"host",
b"transfer-encoding",
)
]
- end_stream = not has_body
- self.connection.send_headers(self.stream_id, headers, end_stream, timeout)
+ self._h2_state.send_headers(stream_id, headers, end_stream=end_stream)
+ self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id)
+ self._write_outgoing_data(request)

- def send_body(self, stream: SyncByteStream, timeout: TimeoutDict) -> None:
- for data in stream:
- while data:
- max_flow = self.connection.wait_for_outgoing_flow(
- self.stream_id, timeout
- )
- chunk_size = min(len(data), max_flow)
- chunk, data = data[:chunk_size], data[chunk_size:]
- self.connection.send_data(self.stream_id, chunk, timeout)
+ def _send_request_body(self, request: Request, stream_id: int) -> None:
+ """
+ Iterate over the request body sending it to a given stream ID.
+ """
+ if not has_body_headers(request):
+ return

- self.connection.end_stream(self.stream_id, timeout)
+ assert isinstance(request.stream, typing.Iterable)
+ for data in request.stream:
+ self._send_stream_data(request, stream_id, data)
+ self._send_end_stream(request, stream_id)

- def receive_response(
- self, timeout: TimeoutDict
- ) -> Tuple[int, List[Tuple[bytes, bytes]]]:
+ def _send_stream_data(
+ self, request: Request, stream_id: int, data: bytes
+ ) -> None:
"""
- Read the response status and headers from the network.
+ Send a single chunk of data in one or more data frames.
+ """
+ while data:
+ max_flow = self._wait_for_outgoing_flow(request, stream_id)
+ chunk_size = min(len(data), max_flow)
+ chunk, data = data[:chunk_size], data[chunk_size:]
+ self._h2_state.send_data(stream_id, chunk)
+ self._write_outgoing_data(request)
+
+ def _send_end_stream(self, request: Request, stream_id: int) -> None:
+ """
+ Send an empty data frame on a given stream ID with the END_STREAM flag set.
+ """
+ self._h2_state.end_stream(stream_id)
+ self._write_outgoing_data(request)
+
+ # Receiving the response...
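(For illustration, the pseudo-header mapping performed by `_send_request_headers` above can be restated as a standalone helper; this is a sketch mirroring the diff's logic, not part of the patch:)

```python
from typing import List, Tuple

def to_http2_headers(
    method: bytes, scheme: bytes, target: bytes,
    headers: List[Tuple[bytes, bytes]],
) -> List[Tuple[bytes, bytes]]:
    # Host becomes the :authority pseudo-header, header names are
    # lowercased, and HTTP/1.1 framing headers are dropped.
    authority = [v for k, v in headers if k.lower() == b"host"][0]
    return [
        (b":method", method),
        (b":authority", authority),
        (b":scheme", scheme),
        (b":path", target),
    ] + [
        (k.lower(), v)
        for k, v in headers
        if k.lower() not in (b"host", b"transfer-encoding")
    ]

# to_http2_headers(b"GET", b"https", b"/", [(b"Host", b"example.com")])
# -> [(b":method", b"GET"), (b":authority", b"example.com"),
#     (b":scheme", b"https"), (b":path", b"/")]
```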
+
+ def _receive_response(
+ self, request: Request, stream_id: int
+ ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]:
+ """
+ Return the response status code and headers for a given stream ID.
"""
while True:
- event = self.connection.wait_for_event(self.stream_id, timeout)
+ event = self._receive_stream_event(request, stream_id)
if isinstance(event, h2.events.ResponseReceived):
break

@@ -430,17 +303,287 @@ def receive_response(

return (status_code, headers)

- def body_iter(self, timeout: TimeoutDict) -> Iterator[bytes]:
+ def _receive_response_body(
+ self, request: Request, stream_id: int
+ ) -> typing.Iterator[bytes]:
+ """
+ Iterator that returns the bytes of the response body for a given stream ID.
+ """
while True:
- event = self.connection.wait_for_event(self.stream_id, timeout)
+ event = self._receive_stream_event(request, stream_id)
if isinstance(event, h2.events.DataReceived):
amount = event.flow_controlled_length
- self.connection.acknowledge_received_data(
- self.stream_id, amount, timeout
- )
+ self._h2_state.acknowledge_received_data(amount, stream_id)
+ self._write_outgoing_data(request)
yield event.data
- elif isinstance(event, (h2.events.StreamEnded, h2.events.StreamReset)):
+ elif isinstance(event, h2.events.StreamEnded):
break

- def _response_closed(self) -> None:
- self.connection.close_stream(self.stream_id)
+ def _receive_stream_event(
+ self, request: Request, stream_id: int
+ ) -> typing.Union[
+ h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded
+ ]:
+ """
+ Return the next available event for a given stream ID.
+
+ Will read more data from the network if required.
+ """
+ while not self._events.get(stream_id):
+ self._receive_events(request, stream_id)
+ event = self._events[stream_id].pop(0)
+ if isinstance(event, h2.events.StreamReset):
+ raise RemoteProtocolError(event)
+ return event
+
+ def _receive_events(
+ self, request: Request, stream_id: typing.Optional[int] = None
+ ) -> None:
+ """
+ Read some data from the network until we see one or more events
+ for a given stream ID.
+ """
+ with self._read_lock:
+ if self._connection_terminated is not None:
+ last_stream_id = self._connection_terminated.last_stream_id
+ if stream_id and last_stream_id and stream_id > last_stream_id:
+ self._request_count -= 1
+ raise ConnectionNotAvailable()
+ raise RemoteProtocolError(self._connection_terminated)
+
+ # This conditional is a bit icky. We don't want to block reading if we've
+ # actually got an event to return for a given stream. We need to do that
+ # check *within* the atomic read lock. Though it also needs to be optional,
+ # because when we call it from `_wait_for_outgoing_flow` we *do* want to
+ # block until we have available flow control, even when we have events
+ # pending for the stream ID we're attempting to send on.
+ if stream_id is None or not self._events.get(stream_id): + events = self._read_incoming_data(request) + for event in events: + if isinstance(event, h2.events.RemoteSettingsChanged): + with Trace( + "receive_remote_settings", logger, request + ) as trace: + self._receive_remote_settings_change(event) + trace.return_value = event + + elif isinstance( + event, + ( + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ), + ): + if event.stream_id in self._events: + self._events[event.stream_id].append(event) + + elif isinstance(event, h2.events.ConnectionTerminated): + self._connection_terminated = event + + self._write_outgoing_data(request) + + def _receive_remote_settings_change(self, event: h2.events.Event) -> None: + max_concurrent_streams = event.changed_settings.get( + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS + ) + if max_concurrent_streams: + new_max_streams = min( + max_concurrent_streams.new_value, + self._h2_state.local_settings.max_concurrent_streams, + ) + if new_max_streams and new_max_streams != self._max_streams: + while new_max_streams > self._max_streams: + self._max_streams_semaphore.release() + self._max_streams += 1 + while new_max_streams < self._max_streams: + self._max_streams_semaphore.acquire() + self._max_streams -= 1 + + def _response_closed(self, stream_id: int) -> None: + self._max_streams_semaphore.release() + del self._events[stream_id] + with self._state_lock: + if self._connection_terminated and not self._events: + self.close() + + elif self._state == HTTPConnectionState.ACTIVE and not self._events: + self._state = HTTPConnectionState.IDLE + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + if self._used_all_stream_ids: # pragma: nocover + self.close() + + def close(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._h2_state.close_connection() + self._state = HTTPConnectionState.CLOSED + self._network_stream.close() + + # Wrappers around network read/write operations... + + def _read_incoming_data( + self, request: Request + ) -> typing.List[h2.events.Event]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + if self._read_exception is not None: + raise self._read_exception # pragma: nocover + + try: + data = self._network_stream.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + except Exception as exc: + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future reads. + # (For example, this means that a single read timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. 
+ self._read_exception = exc + self._connection_error = True + raise exc + + events: typing.List[h2.events.Event] = self._h2_state.receive_data(data) + + return events + + def _write_outgoing_data(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + with self._write_lock: + data_to_send = self._h2_state.data_to_send() + + if self._write_exception is not None: + raise self._write_exception # pragma: nocover + + try: + self._network_stream.write(data_to_send, timeout) + except Exception as exc: # pragma: nocover + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future write. + # (For example, this means that a single write timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._write_exception = exc + self._connection_error = True + raise exc + + # Flow control... + + def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow: int = self._h2_state.local_flow_control_window(stream_id) + max_frame_size: int = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + while flow == 0: + self._receive_events(request) + local_flow = self._h2_state.local_flow_control_window(stream_id) + max_frame_size = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + return flow + + # Interface for connection pooling... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + return ( + self._state != HTTPConnectionState.CLOSED + and not self._connection_error + and not self._used_all_stream_ids + and not ( + self._h2_state.state_machine.state + == h2.connection.ConnectionState.CLOSED + ) + ) + + def has_expired(self) -> bool: + now = time.monotonic() + return self._expire_at is not None and now > self._expire_at + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/2, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
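As with the HTTP/1.1 connection earlier, direct usage might look like this sketch (a hedged illustration, not part of this diff; the import paths and `connect_tcp` arguments are assumptions, while the `start_tls` keywords mirror the tunnel code later in this patch):

```python
import ssl

from httpcore._backends.sync import SyncBackend
from httpcore._models import Origin
from httpcore._sync.http2 import HTTP2Connection

# Hypothetical direct usage: HTTP/2 over TLS, negotiated via ALPN "h2".
ctx = ssl.create_default_context()
ctx.set_alpn_protocols(["h2"])

stream = SyncBackend().connect_tcp("example.com", 443, timeout=10.0)
stream = stream.start_tls(
    ssl_context=ctx, server_hostname="example.com", timeout=10.0
)

origin = Origin(scheme=b"https", host=b"example.com", port=443)
with HTTP2Connection(origin=origin, stream=stream) as connection:
    response = connection.request("GET", "https://example.com/")
    print(response.status, connection.info())
```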
+ + def __enter__(self) -> "HTTP2Connection": + return self + + def __exit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[types.TracebackType] = None, + ) -> None: + self.close() + + +class HTTP2ConnectionByteStream: + def __init__( + self, connection: HTTP2Connection, request: Request, stream_id: int + ) -> None: + self._connection = connection + self._request = request + self._stream_id = stream_id + self._closed = False + + def __iter__(self) -> typing.Iterator[bytes]: + kwargs = {"request": self._request, "stream_id": self._stream_id} + try: + with Trace("receive_response_body", logger, self._request, kwargs): + for chunk in self._connection._receive_response_body( + request=self._request, stream_id=self._stream_id + ): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with ShieldCancellation(): + self.close() + raise exc + + def close(self) -> None: + if not self._closed: + self._closed = True + kwargs = {"stream_id": self._stream_id} + with Trace("response_closed", logger, self._request, kwargs): + self._connection._response_closed(stream_id=self._stream_id) diff --git a/packages/httpcore/_sync/http_proxy.py b/packages/httpcore/_sync/http_proxy.py index 78c02e29b..6acac9a7c 100644 --- a/packages/httpcore/_sync/http_proxy.py +++ b/packages/httpcore/_sync/http_proxy.py @@ -1,35 +1,45 @@ -from http import HTTPStatus -from ssl import SSLContext -from typing import Tuple, cast +import logging +import ssl +from base64 import b64encode +from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union -from .._bytestreams import ByteStream +from .._backends.base import SOCKET_OPTION, NetworkBackend from .._exceptions import ProxyError -from .._types import URL, Headers, TimeoutDict -from .._utils import get_logger, url_to_origin -from .base import SyncByteStream -from .connection import SyncHTTPConnection -from .connection_pool import SyncConnectionPool, ResponseByteStream - -logger = get_logger(__name__) - - -def get_reason_phrase(status_code: int) -> str: - try: - return HTTPStatus(status_code).phrase - except ValueError: - return "" +from .._models import ( + URL, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, +) +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .connection import HTTPConnection +from .connection_pool import ConnectionPool +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] +HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] + + +logger = logging.getLogger("httpcore.proxy") def merge_headers( - default_headers: Headers = None, override_headers: Headers = None -) -> Headers: + default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, + override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, +) -> List[Tuple[bytes, bytes]]: """ - Append default_headers and override_headers, de-duplicating if a key existing in - both cases. + Append default_headers and override_headers, de-duplicating if a key exists + in both cases. 
""" - default_headers = [] if default_headers is None else default_headers - override_headers = [] if override_headers is None else override_headers - has_override = set([key.lower() for key, value in override_headers]) + default_headers = [] if default_headers is None else list(default_headers) + override_headers = [] if override_headers is None else list(override_headers) + has_override = set(key.lower() for key, value in override_headers) default_headers = [ (key, value) for key, value in default_headers @@ -38,253 +48,321 @@ def merge_headers( return default_headers + override_headers -class SyncHTTPProxy(SyncConnectionPool): +def build_auth_header(username: bytes, password: bytes) -> bytes: + userpass = username + b":" + password + return b"Basic " + b64encode(userpass) + + +class HTTPProxy(ConnectionPool): """ - A connection pool for making HTTP requests via an HTTP proxy. - - Parameters - ---------- - proxy_url: - The URL of the proxy service as a 4-tuple of (scheme, host, port, path). - proxy_headers: - A list of proxy headers to include. - proxy_mode: - A proxy mode to operate in. May be "DEFAULT", "FORWARD_ONLY", or "TUNNEL_ONLY". - ssl_context: - An SSL context to use for verifying connections. - max_connections: - The maximum number of concurrent connections to allow. - max_keepalive_connections: - The maximum number of connections to allow before closing keep-alive - connections. - http2: - Enable HTTP/2 support. + A connection pool that sends requests via an HTTP proxy. """ def __init__( self, - proxy_url: URL, - proxy_headers: Headers = None, - proxy_mode: str = "DEFAULT", - ssl_context: SSLContext = None, - max_connections: int = None, - max_keepalive_connections: int = None, - keepalive_expiry: float = None, + proxy_url: Union[URL, bytes, str], + proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None, + proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, + ssl_context: Optional[ssl.SSLContext] = None, + proxy_ssl_context: Optional[ssl.SSLContext] = None, + max_connections: Optional[int] = 10, + max_keepalive_connections: Optional[int] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, http2: bool = False, - backend: str = "sync", - # Deprecated argument style: - max_keepalive: int = None, - ): - assert proxy_mode in ("DEFAULT", "FORWARD_ONLY", "TUNNEL_ONLY") - - self.proxy_origin = url_to_origin(proxy_url) - self.proxy_headers = [] if proxy_headers is None else proxy_headers - self.proxy_mode = proxy_mode + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[NetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + proxy_auth: Any proxy authentication as a two-tuple of + (username, password). May be either bytes or ascii-only str. + proxy_headers: Any HTTP headers to use for the proxy requests. + For example `{"Proxy-Authorization": "Basic :"}`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. 
Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + local_address: Local address to connect from. Can also be used to + connect using a particular address family. Using + `local_address="0.0.0.0"` will connect using an `AF_INET` address + (IPv4), while using `local_address="::"` will connect using an + `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. + """ super().__init__( ssl_context=ssl_context, max_connections=max_connections, max_keepalive_connections=max_keepalive_connections, keepalive_expiry=keepalive_expiry, + http1=http1, http2=http2, - backend=backend, - max_keepalive=max_keepalive, + network_backend=network_backend, + retries=retries, + local_address=local_address, + uds=uds, + socket_options=socket_options, ) - def handle_request( - self, - method: bytes, - url: URL, - headers: Headers, - stream: SyncByteStream, - extensions: dict, - ) -> Tuple[int, Headers, SyncByteStream, dict]: - if self._keepalive_expiry is not None: - self._keepalive_sweep() - + self._proxy_url = enforce_url(proxy_url, name="proxy_url") if ( - self.proxy_mode == "DEFAULT" and url[0] == b"http" - ) or self.proxy_mode == "FORWARD_ONLY": - # By default HTTP requests should be forwarded. - logger.trace( - "forward_request proxy_origin=%r proxy_headers=%r method=%r url=%r", - self.proxy_origin, - self.proxy_headers, - method, - url, - ) - return self._forward_request( - method, url, headers=headers, stream=stream, extensions=extensions - ) - else: - # By default HTTPS should be tunnelled. - logger.trace( - "tunnel_request proxy_origin=%r proxy_headers=%r method=%r url=%r", - self.proxy_origin, - self.proxy_headers, - method, - url, - ) - return self._tunnel_request( - method, url, headers=headers, stream=stream, extensions=extensions + self._proxy_url.scheme == b"http" and proxy_ssl_context is not None + ): # pragma: no cover + raise RuntimeError( + "The `proxy_ssl_context` argument is not allowed for the http scheme" ) - def _forward_request( - self, - method: bytes, - url: URL, - headers: Headers, - stream: SyncByteStream, - extensions: dict, - ) -> Tuple[int, Headers, SyncByteStream, dict]: - """ - Forwarded proxy requests include the entire URL as the HTTP target, - rather than just the path. 
- """ - timeout = cast(TimeoutDict, extensions.get("timeout", {})) - origin = self.proxy_origin - connection = self._get_connection_from_pool(origin) - - if connection is None: - connection = SyncHTTPConnection( - origin=origin, - http2=self._http2, + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + if proxy_auth is not None: + username = enforce_bytes(proxy_auth[0], name="proxy_auth") + password = enforce_bytes(proxy_auth[1], name="proxy_auth") + authorization = build_auth_header(username, password) + self._proxy_headers = [ + (b"Proxy-Authorization", authorization) + ] + self._proxy_headers + + def create_connection(self, origin: Origin) -> ConnectionInterface: + if origin.scheme == b"http": + return ForwardHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, keepalive_expiry=self._keepalive_expiry, - ssl_context=self._ssl_context, + network_backend=self._network_backend, + proxy_ssl_context=self._proxy_ssl_context, ) - self._add_to_pool(connection, timeout) - - # Issue a forwarded proxy request... - - # GET https://www.example.org/path HTTP/1.1 - # [proxy headers] - # [headers] - scheme, host, port, path = url - if port is None: - target = b"%b://%b%b" % (scheme, host, path) - else: - target = b"%b://%b:%d%b" % (scheme, host, port, path) - - url = self.proxy_origin + (target,) - headers = merge_headers(self.proxy_headers, headers) - - ( - status_code, - headers, - stream, - extensions, - ) = connection.handle_request( - method, url, headers=headers, stream=stream, extensions=extensions + return TunnelHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + ssl_context=self._ssl_context, + proxy_ssl_context=self._proxy_ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, ) - wrapped_stream = ResponseByteStream( - stream, connection=connection, callback=self._response_closed + +class ForwardHTTPConnection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, + keepalive_expiry: Optional[float] = None, + network_backend: Optional[NetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + proxy_ssl_context: Optional[ssl.SSLContext] = None, + ) -> None: + self._connection = HTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._remote_origin = remote_origin + + def handle_request(self, request: Request) -> Response: + headers = merge_headers(self._proxy_headers, request.headers) + url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=bytes(request.url), + ) + proxy_request = Request( + method=request.method, + url=url, + headers=headers, + content=request.stream, + extensions=request.extensions, ) + return self._connection.handle_request(proxy_request) - return status_code, headers, wrapped_stream, extensions + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin - def _tunnel_request( - self, 
- method: bytes, - url: URL, - headers: Headers, - stream: SyncByteStream, - extensions: dict, - ) -> Tuple[int, Headers, SyncByteStream, dict]: - """ - Tunnelled proxy requests require an initial CONNECT request to - establish the connection, and then send regular requests. - """ - timeout = cast(TimeoutDict, extensions.get("timeout", {})) - origin = url_to_origin(url) - connection = self._get_connection_from_pool(origin) + def close(self) -> None: + self._connection.close() - if connection is None: - scheme, host, port = origin + def info(self) -> str: + return self._connection.info() - # First, create a connection to the proxy server - proxy_connection = SyncHTTPConnection( - origin=self.proxy_origin, - http2=self._http2, - keepalive_expiry=self._keepalive_expiry, - ssl_context=self._ssl_context, - ) + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() - # Issue a CONNECT request... - - # CONNECT www.example.org:80 HTTP/1.1 - # [proxy-headers] - target = b"%b:%d" % (host, port) - connect_url = self.proxy_origin + (target,) - connect_headers = [(b"Host", target), (b"Accept", b"*/*")] - connect_headers = merge_headers(connect_headers, self.proxy_headers) - - try: - ( - proxy_status_code, - _, - proxy_stream, - _, - ) = proxy_connection.handle_request( - b"CONNECT", - connect_url, + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + +class TunnelHTTPConnection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + ssl_context: Optional[ssl.SSLContext] = None, + proxy_ssl_context: Optional[ssl.SSLContext] = None, + proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + network_backend: Optional[NetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + self._connection: ConnectionInterface = HTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._connect_lock = Lock() + self._connected = False + + def handle_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + with self._connect_lock: + if not self._connected: + target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) + + connect_url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=target, + ) + connect_headers = merge_headers( + [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers + ) + connect_request = Request( + method=b"CONNECT", + url=connect_url, headers=connect_headers, - stream=ByteStream(b""), - extensions=extensions, + extensions=request.extensions, ) - - proxy_reason = get_reason_phrase(proxy_status_code) - logger.trace( - "tunnel_response proxy_status_code=%r 
proxy_reason=%r ", - proxy_status_code, - proxy_reason, + connect_response = self._connection.handle_request( + connect_request ) - # Read the response data without closing the socket - for _ in proxy_stream: - pass - # See if the tunnel was successfully established. - if proxy_status_code < 200 or proxy_status_code > 299: - msg = "%d %s" % (proxy_status_code, proxy_reason) + if connect_response.status < 200 or connect_response.status > 299: + reason_bytes = connect_response.extensions.get("reason_phrase", b"") + reason_str = reason_bytes.decode("ascii", errors="ignore") + msg = "%d %s" % (connect_response.status, reason_str) + self._connection.close() raise ProxyError(msg) - # Upgrade to TLS if required - # We assume the target speaks TLS on the specified port - if scheme == b"https": - proxy_connection.start_tls(host, self._ssl_context, timeout) - except Exception as exc: - proxy_connection.close() - raise ProxyError(exc) - - # The CONNECT request is successful, so we have now SWITCHED PROTOCOLS. - # This means the proxy connection is now unusable, and we must create - # a new one for regular requests, making sure to use the same socket to - # retain the tunnel. - connection = SyncHTTPConnection( - origin=origin, - http2=self._http2, - keepalive_expiry=self._keepalive_expiry, - ssl_context=self._ssl_context, - socket=proxy_connection.socket, - ) - self._add_to_pool(connection, timeout) - - # Once the connection has been established we can send requests on - # it as normal. - ( - status_code, - headers, - stream, - extensions, - ) = connection.handle_request( - method, - url, - headers=headers, - stream=stream, - extensions=extensions, - ) + stream = connect_response.extensions["network_stream"] - wrapped_stream = ResponseByteStream( - stream, connection=connection, callback=self._response_closed - ) + # Upgrade the stream to SSL + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + + self._connected = True + return self._connection.handle_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + self._connection.close() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return 
self._connection.is_closed()

- return status_code, headers, wrapped_stream, extensions
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__} [{self.info()}]>"
diff --git a/packages/httpcore/_sync/interfaces.py b/packages/httpcore/_sync/interfaces.py
new file mode 100644
index 000000000..5e95be1ec
--- /dev/null
+++ b/packages/httpcore/_sync/interfaces.py
@@ -0,0 +1,135 @@
+from contextlib import contextmanager
+from typing import Iterator, Optional, Union
+
+from .._models import (
+ URL,
+ Extensions,
+ HeaderTypes,
+ Origin,
+ Request,
+ Response,
+ enforce_bytes,
+ enforce_headers,
+ enforce_url,
+ include_request_headers,
+)
+
+
+class RequestInterface:
+ def request(
+ self,
+ method: Union[bytes, str],
+ url: Union[URL, bytes, str],
+ *,
+ headers: HeaderTypes = None,
+ content: Union[bytes, Iterator[bytes], None] = None,
+ extensions: Optional[Extensions] = None,
+ ) -> Response:
+ # Strict type checking on our parameters.
+ method = enforce_bytes(method, name="method")
+ url = enforce_url(url, name="url")
+ headers = enforce_headers(headers, name="headers")
+
+ # Include Host header, and optionally Content-Length or Transfer-Encoding.
+ headers = include_request_headers(headers, url=url, content=content)
+
+ request = Request(
+ method=method,
+ url=url,
+ headers=headers,
+ content=content,
+ extensions=extensions,
+ )
+ response = self.handle_request(request)
+ try:
+ response.read()
+ finally:
+ response.close()
+ return response
+
+ @contextmanager
+ def stream(
+ self,
+ method: Union[bytes, str],
+ url: Union[URL, bytes, str],
+ *,
+ headers: HeaderTypes = None,
+ content: Union[bytes, Iterator[bytes], None] = None,
+ extensions: Optional[Extensions] = None,
+ ) -> Iterator[Response]:
+ # Strict type checking on our parameters.
+ method = enforce_bytes(method, name="method")
+ url = enforce_url(url, name="url")
+ headers = enforce_headers(headers, name="headers")
+
+ # Include Host header, and optionally Content-Length or Transfer-Encoding.
+ headers = include_request_headers(headers, url=url, content=content)
+
+ request = Request(
+ method=method,
+ url=url,
+ headers=headers,
+ content=content,
+ extensions=extensions,
+ )
+ response = self.handle_request(request)
+ try:
+ yield response
+ finally:
+ response.close()
+
+ def handle_request(self, request: Request) -> Response:
+ raise NotImplementedError() # pragma: nocover
+
+
+class ConnectionInterface(RequestInterface):
+ def close(self) -> None:
+ raise NotImplementedError() # pragma: nocover
+
+ def info(self) -> str:
+ raise NotImplementedError() # pragma: nocover
+
+ def can_handle_request(self, origin: Origin) -> bool:
+ raise NotImplementedError() # pragma: nocover
+
+ def is_available(self) -> bool:
+ """
+ Return `True` if the connection is currently able to accept an
+ outgoing request.
+
+ An HTTP/1.1 connection will only be available if it is currently idle.
+
+ An HTTP/2 connection will be available so long as the stream ID space is
+ not yet exhausted, and the connection is not in an error state.
+
+ While the connection is being established we may not yet know if it is going
+ to result in an HTTP/1.1 or HTTP/2 connection. The connection should be
+ treated as being available, but might ultimately raise `ConnectionNotAvailable`
+ if multiple requests are attempted over a connection
+ that ends up being established as HTTP/1.1.
+ """ + raise NotImplementedError() # pragma: nocover + + def has_expired(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + + This either means that the connection is idle and it has passed the + expiry time on its keep-alive, or that server has sent an EOF. + """ + raise NotImplementedError() # pragma: nocover + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + raise NotImplementedError() # pragma: nocover + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + + Used when a response is closed to determine if the connection may be + returned to the connection pool or not. + """ + raise NotImplementedError() # pragma: nocover diff --git a/packages/httpcore/_sync/socks_proxy.py b/packages/httpcore/_sync/socks_proxy.py new file mode 100644 index 000000000..502e4d7fe --- /dev/null +++ b/packages/httpcore/_sync/socks_proxy.py @@ -0,0 +1,342 @@ +import logging +import ssl +import typing + +from socksio import socks5 + +from .._backends.sync import SyncBackend +from .._backends.base import NetworkBackend, NetworkStream +from .._exceptions import ConnectionNotAvailable, ProxyError +from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .connection_pool import ConnectionPool +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +logger = logging.getLogger("httpcore.socks") + + +AUTH_METHODS = { + b"\x00": "NO AUTHENTICATION REQUIRED", + b"\x01": "GSSAPI", + b"\x02": "USERNAME/PASSWORD", + b"\xff": "NO ACCEPTABLE METHODS", +} + +REPLY_CODES = { + b"\x00": "Succeeded", + b"\x01": "General SOCKS server failure", + b"\x02": "Connection not allowed by ruleset", + b"\x03": "Network unreachable", + b"\x04": "Host unreachable", + b"\x05": "Connection refused", + b"\x06": "TTL expired", + b"\x07": "Command not supported", + b"\x08": "Address type not supported", +} + + +def _init_socks5_connection( + stream: NetworkStream, + *, + host: bytes, + port: int, + auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, +) -> None: + conn = socks5.SOCKS5Connection() + + # Auth method request + auth_method = ( + socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED + if auth is None + else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD + ) + conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method])) + outgoing_bytes = conn.data_to_send() + stream.write(outgoing_bytes) + + # Auth method response + incoming_bytes = stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5AuthReply) + if response.method != auth_method: + requested = AUTH_METHODS.get(auth_method, "UNKNOWN") + responded = AUTH_METHODS.get(response.method, "UNKNOWN") + raise ProxyError( + f"Requested {requested} from proxy server, but got {responded}." 
+ ) + + if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD: + # Username/password request + assert auth is not None + username, password = auth + conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password)) + outgoing_bytes = conn.data_to_send() + stream.write(outgoing_bytes) + + # Username/password response + incoming_bytes = stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5UsernamePasswordReply) + if not response.success: + raise ProxyError("Invalid username/password") + + # Connect request + conn.send( + socks5.SOCKS5CommandRequest.from_address( + socks5.SOCKS5Command.CONNECT, (host, port) + ) + ) + outgoing_bytes = conn.data_to_send() + stream.write(outgoing_bytes) + + # Connect response + incoming_bytes = stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5Reply) + if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED: + reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN") + raise ProxyError(f"Proxy Server could not connect: {reply_code}.") + + +class SOCKSProxy(ConnectionPool): + """ + A connection pool that sends requests via a SOCKS proxy. + """ + + def __init__( + self, + proxy_url: typing.Union[URL, bytes, str], + proxy_auth: typing.Optional[ + typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]] + ] = None, + ssl_context: typing.Optional[ssl.SSLContext] = None, + max_connections: typing.Optional[int] = 10, + max_keepalive_connections: typing.Optional[int] = None, + keepalive_expiry: typing.Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + network_backend: typing.Optional[NetworkBackend] = None, + ) -> None: + """ + A connection pool for making HTTP requests via a SOCKS proxy. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"socks5://127.0.0.1:1080/"`. + proxy_auth: Any proxy authentication as a two-tuple of + (username, password). May be either bytes or ascii strings. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + network_backend: A backend instance to use for handling network I/O.
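An end-to-end sketch of the new SOCKS support. This assumes the optional `socksio` dependency is installed (e.g. `pip install httpcore[socks]`) and a SOCKS5 proxy listening locally; the proxy address is hypothetical (for instance one opened with `ssh -D 1080 user@host`):

```python
import httpcore

# Route all requests in this pool through the SOCKS5 proxy.
with httpcore.SOCKSProxy(proxy_url="socks5://127.0.0.1:1080/") as proxy:
    response = proxy.request("GET", "https://example.com/")
    print(response.status)
```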
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if proxy_auth is not None: + username, password = proxy_auth + username_bytes = enforce_bytes(username, name="proxy_auth") + password_bytes = enforce_bytes(password, name="proxy_auth") + self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = ( + username_bytes, + password_bytes, + ) + else: + self._proxy_auth = None + + def create_connection(self, origin: Origin) -> ConnectionInterface: + return Socks5Connection( + proxy_origin=self._proxy_url.origin, + remote_origin=origin, + proxy_auth=self._proxy_auth, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class Socks5Connection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, + ssl_context: typing.Optional[ssl.SSLContext] = None, + keepalive_expiry: typing.Optional[float] = None, + http1: bool = True, + http2: bool = False, + network_backend: typing.Optional[NetworkBackend] = None, + ) -> None: + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._proxy_auth = proxy_auth + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + + self._network_backend: NetworkBackend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._connect_lock = Lock() + self._connection: typing.Optional[ConnectionInterface] = None + self._connect_failed = False + + def handle_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + with self._connect_lock: + if self._connection is None: + try: + # Connect to the proxy + kwargs = { + "host": self._proxy_origin.host.decode("ascii"), + "port": self._proxy_origin.port, + "timeout": timeout, + } + with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + + # Connect to the remote host using socks5 + kwargs = { + "stream": stream, + "host": self._remote_origin.host.decode("ascii"), + "port": self._remote_origin.port, + "auth": self._proxy_auth, + } + with Trace( + "setup_socks5_connection", logger, request, kwargs + ) as trace: + _init_socks5_connection(**kwargs) + trace.return_value = stream + + # Upgrade the stream to SSL + if self._remote_origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ( + ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ) + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + 
http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or ( + self._http2 and not self._http1 + ): # pragma: nocover + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except Exception as exc: + self._connect_failed = True + raise exc + elif not self._connection.is_available(): # pragma: nocover + raise ConnectionNotAvailable() + + return self._connection.handle_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + if self._connection is not None: + self._connection.close() + + def is_available(self) -> bool: + if self._connection is None: # pragma: nocover + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._remote_origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: # pragma: nocover + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/packages/httpcore/_synchronization.py b/packages/httpcore/_synchronization.py new file mode 100644 index 000000000..bae27c1b1 --- /dev/null +++ b/packages/httpcore/_synchronization.py @@ -0,0 +1,279 @@ +import threading +from types import TracebackType +from typing import Optional, Type + +import sniffio + +from ._exceptions import ExceptionMapping, PoolTimeout, map_exceptions + +# Our async synchronization primitives use either 'anyio' or 'trio' depending +# on whether they're running under asyncio or trio. + +try: + import trio +except ImportError: # pragma: nocover + trio = None # type: ignore + +try: + import anyio +except ImportError: # pragma: nocover + anyio = None # type: ignore + + +class AsyncLock: + def __init__(self) -> None: + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a lock with the correct implementation. + """ + self._backend = sniffio.current_async_library() + if self._backend == "trio": + if trio is None: # pragma: nocover + raise RuntimeError( + "Running under trio requires the 'trio' package to be installed." + ) + self._trio_lock = trio.Lock() + else: + if anyio is None: # pragma: nocover + raise RuntimeError( + "Running under asyncio requires the 'anyio' package to be installed."
+ ) + self._anyio_lock = anyio.Lock() + + async def __aenter__(self) -> "AsyncLock": + if not self._backend: + self.setup() + + if self._backend == "trio": + await self._trio_lock.acquire() + else: + await self._anyio_lock.acquire() + + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + if self._backend == "trio": + self._trio_lock.release() + else: + self._anyio_lock.release() + + +class AsyncEvent: + def __init__(self) -> None: + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + an event with the correct implementation. + """ + self._backend = sniffio.current_async_library() + if self._backend == "trio": + if trio is None: # pragma: nocover + raise RuntimeError( + "Running under trio requires the 'trio' package to be installed." + ) + self._trio_event = trio.Event() + else: + if anyio is None: # pragma: nocover + raise RuntimeError( + "Running under asyncio requires the 'anyio' package to be installed." + ) + self._anyio_event = anyio.Event() + + def set(self) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + self._trio_event.set() + else: + self._anyio_event.set() + + async def wait(self, timeout: Optional[float] = None) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + if trio is None: # pragma: nocover + raise RuntimeError( + "Running under trio requires the 'trio' package to be installed." + ) + + trio_exc_map: ExceptionMapping = {trio.TooSlowError: PoolTimeout} + timeout_or_inf = float("inf") if timeout is None else timeout + with map_exceptions(trio_exc_map): + with trio.fail_after(timeout_or_inf): + await self._trio_event.wait() + else: + if anyio is None: # pragma: nocover + raise RuntimeError( + "Running under asyncio requires the 'anyio' package to be installed." + ) + + anyio_exc_map: ExceptionMapping = {TimeoutError: PoolTimeout} + with map_exceptions(anyio_exc_map): + with anyio.fail_after(timeout): + await self._anyio_event.wait() + + +class AsyncSemaphore: + def __init__(self, bound: int) -> None: + self._bound = bound + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a semaphore with the correct implementation. + """ + self._backend = sniffio.current_async_library() + if self._backend == "trio": + if trio is None: # pragma: nocover + raise RuntimeError( + "Running under trio requires the 'trio' package to be installed." + ) + + self._trio_semaphore = trio.Semaphore( + initial_value=self._bound, max_value=self._bound + ) + else: + if anyio is None: # pragma: nocover + raise RuntimeError( + "Running under asyncio requires the 'anyio' package to be installed." + ) + + self._anyio_semaphore = anyio.Semaphore( + initial_value=self._bound, max_value=self._bound + ) + + async def acquire(self) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + await self._trio_semaphore.acquire() + else: + await self._anyio_semaphore.acquire() + + async def release(self) -> None: + if self._backend == "trio": + self._trio_semaphore.release() + else: + self._anyio_semaphore.release() + + +class AsyncShieldCancellation: + # For certain portions of our codebase where we're dealing with + # closing connections during exception handling we want to shield + # the operation from being cancelled.
+ # + # with AsyncShieldCancellation(): + # ... # clean-up operations, shielded from cancellation. + + def __init__(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a shielded scope with the correct implementation. + """ + self._backend = sniffio.current_async_library() + + if self._backend == "trio": + if trio is None: # pragma: nocover + raise RuntimeError( + "Running under trio requires the 'trio' package to be installed." + ) + + self._trio_shield = trio.CancelScope(shield=True) + else: + if anyio is None: # pragma: nocover + raise RuntimeError( + "Running under asyncio requires the 'anyio' package to be installed." + ) + + self._anyio_shield = anyio.CancelScope(shield=True) + + def __enter__(self) -> "AsyncShieldCancellation": + if self._backend == "trio": + self._trio_shield.__enter__() + else: + self._anyio_shield.__enter__() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + if self._backend == "trio": + self._trio_shield.__exit__(exc_type, exc_value, traceback) + else: + self._anyio_shield.__exit__(exc_type, exc_value, traceback) + + +# Our thread-based synchronization primitives... + + +class Lock: + def __init__(self) -> None: + self._lock = threading.Lock() + + def __enter__(self) -> "Lock": + self._lock.acquire() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self._lock.release() + + +class Event: + def __init__(self) -> None: + self._event = threading.Event() + + def set(self) -> None: + self._event.set() + + def wait(self, timeout: Optional[float] = None) -> None: + if not self._event.wait(timeout=timeout): + raise PoolTimeout() # pragma: nocover + + +class Semaphore: + def __init__(self, bound: int) -> None: + self._semaphore = threading.Semaphore(value=bound) + + def acquire(self) -> None: + self._semaphore.acquire() + + def release(self) -> None: + self._semaphore.release() + + +class ShieldCancellation: + # Thread-synchronous codebases don't support cancellation semantics. + # We have this class because we need to mirror the async and sync + # cases within our package, but it's just a no-op. + def __enter__(self) -> "ShieldCancellation": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + pass diff --git a/packages/httpcore/_threadlock.py b/packages/httpcore/_threadlock.py deleted file mode 100644 index 2ff2bc378..000000000 --- a/packages/httpcore/_threadlock.py +++ /dev/null @@ -1,35 +0,0 @@ -import threading -from types import TracebackType -from typing import Type - - -class ThreadLock: - """ - Provides thread safety when used as a sync context manager, or a - no-op when used as an async context manager. 
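The deleted `ThreadLock` (a real lock when used synchronously, a no-op when used asynchronously) is replaced by the explicit primitives above, which pick their backend lazily via `sniffio` on first use. A sketch of the pattern; the import path is a private vendored module, so treat it as illustrative, and note that running under asyncio requires `anyio` to be installed:

```python
import asyncio

from httpcore._synchronization import AsyncLock  # private module; path may vary

async def main() -> None:
    lock = AsyncLock()   # safe to construct before any event loop exists
    async with lock:     # first acquisition sniffs asyncio vs. trio
        ...              # critical section, released on exit

asyncio.run(main())
```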
- """ - - def __init__(self) -> None: - self.lock = threading.Lock() - - def __enter__(self) -> None: - self.lock.acquire() - - def __exit__( - self, - exc_type: Type[BaseException] = None, - exc_value: BaseException = None, - traceback: TracebackType = None, - ) -> None: - self.lock.release() - - async def __aenter__(self) -> None: - pass - - async def __aexit__( - self, - exc_type: Type[BaseException] = None, - exc_value: BaseException = None, - traceback: TracebackType = None, - ) -> None: - pass diff --git a/packages/httpcore/_trace.py b/packages/httpcore/_trace.py new file mode 100644 index 000000000..b122a53e8 --- /dev/null +++ b/packages/httpcore/_trace.py @@ -0,0 +1,105 @@ +import inspect +import logging +from types import TracebackType +from typing import Any, Dict, Optional, Type + +from ._models import Request + + +class Trace: + def __init__( + self, + name: str, + logger: logging.Logger, + request: Optional[Request] = None, + kwargs: Optional[Dict[str, Any]] = None, + ) -> None: + self.name = name + self.logger = logger + self.trace_extension = ( + None if request is None else request.extensions.get("trace") + ) + self.debug = self.logger.isEnabledFor(logging.DEBUG) + self.kwargs = kwargs or {} + self.return_value: Any = None + self.should_trace = self.debug or self.trace_extension is not None + self.prefix = self.logger.name.split(".")[-1] + + def trace(self, name: str, info: Dict[str, Any]) -> None: + if self.trace_extension is not None: + prefix_and_name = f"{self.prefix}.{name}" + ret = self.trace_extension(prefix_and_name, info) + if inspect.iscoroutine(ret): # pragma: no cover + raise TypeError( + "If you are using a synchronous interface, " + "the callback of the `trace` extension should " + "be a normal function instead of an asynchronous function." + ) + + if self.debug: + if not info or "return_value" in info and info["return_value"] is None: + message = name + else: + args = " ".join([f"{key}={value!r}" for key, value in info.items()]) + message = f"{name} {args}" + self.logger.debug(message) + + def __enter__(self) -> "Trace": + if self.should_trace: + info = self.kwargs + self.trace(f"{self.name}.started", info) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + if self.should_trace: + if exc_value is None: + info = {"return_value": self.return_value} + self.trace(f"{self.name}.complete", info) + else: + info = {"exception": exc_value} + self.trace(f"{self.name}.failed", info) + + async def atrace(self, name: str, info: Dict[str, Any]) -> None: + if self.trace_extension is not None: + prefix_and_name = f"{self.prefix}.{name}" + coro = self.trace_extension(prefix_and_name, info) + if not inspect.iscoroutine(coro): # pragma: no cover + raise TypeError( + "If you're using an asynchronous interface, " + "the callback of the `trace` extension should " + "be an asynchronous function rather than a normal function." 
+ ) + await coro + + if self.debug: + if not info or "return_value" in info and info["return_value"] is None: + message = name + else: + args = " ".join([f"{key}={value!r}" for key, value in info.items()]) + message = f"{name} {args}" + self.logger.debug(message) + + async def __aenter__(self) -> "Trace": + if self.should_trace: + info = self.kwargs + await self.atrace(f"{self.name}.started", info) + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + if self.should_trace: + if exc_value is None: + info = {"return_value": self.return_value} + await self.atrace(f"{self.name}.complete", info) + else: + info = {"exception": exc_value} + await self.atrace(f"{self.name}.failed", info) diff --git a/packages/httpcore/_types.py b/packages/httpcore/_types.py deleted file mode 100644 index 2f9eeba7f..000000000 --- a/packages/httpcore/_types.py +++ /dev/null @@ -1,12 +0,0 @@ -""" -Type definitions for type checking purposes. -""" - -from typing import List, Mapping, Optional, Tuple, TypeVar, Union - -T = TypeVar("T") -StrOrBytes = Union[str, bytes] -Origin = Tuple[bytes, bytes, int] -URL = Tuple[bytes, bytes, Optional[int], bytes] -Headers = List[Tuple[bytes, bytes]] -TimeoutDict = Mapping[str, Optional[float]] diff --git a/packages/httpcore/_utils.py b/packages/httpcore/_utils.py index 978b87a27..df5dea8fe 100644 --- a/packages/httpcore/_utils.py +++ b/packages/httpcore/_utils.py @@ -1,83 +1,12 @@ -import itertools -import logging -import os import select import socket import sys import typing -from ._types import URL, Origin - -_LOGGER_INITIALIZED = False -TRACE_LOG_LEVEL = 5 -DEFAULT_PORTS = {b"http": 80, b"https": 443} - - -class Logger(logging.Logger): - # Stub for type checkers. - def trace(self, message: str, *args: typing.Any, **kwargs: typing.Any) -> None: - ... # pragma: nocover - - -def get_logger(name: str) -> Logger: - """ - Get a `logging.Logger` instance, and optionally - set up debug logging based on the HTTPCORE_LOG_LEVEL or HTTPX_LOG_LEVEL - environment variables. 
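The environment-variable logging removed below is superseded by two mechanisms: standard `logging` on the `httpcore.*` loggers, and the `trace` request extension that the new `Trace` class drives. A hedged sketch of the latter (URL illustrative; event names follow the `<logger suffix>.<step>.<started|complete|failed>` pattern shown above):

```python
import httpcore

def log_event(event_name: str, info: dict) -> None:
    # Receives events such as "connection.connect_tcp.started" or
    # "http11.send_request_headers.complete", with details in `info`.
    print(event_name, info)

with httpcore.ConnectionPool() as pool:
    pool.request("GET", "https://example.com/", extensions={"trace": log_event})
```

Note that the sync code path deliberately rejects coroutine callbacks here, so a plain function is required; the async client enforces the inverse.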
- """ - global _LOGGER_INITIALIZED - if not _LOGGER_INITIALIZED: - _LOGGER_INITIALIZED = True - logging.addLevelName(TRACE_LOG_LEVEL, "TRACE") - - log_level = os.environ.get( - "HTTPCORE_LOG_LEVEL", os.environ.get("HTTPX_LOG_LEVEL", "") - ).upper() - if log_level in ("DEBUG", "TRACE"): - logger = logging.getLogger("httpcore") - logger.setLevel(logging.DEBUG if log_level == "DEBUG" else TRACE_LOG_LEVEL) - handler = logging.StreamHandler(sys.stderr) - handler.setFormatter( - logging.Formatter( - fmt="%(levelname)s [%(asctime)s] %(name)s - %(message)s", - datefmt="%Y-%m-%d %H:%M:%S", - ) - ) - logger.addHandler(handler) - - logger = logging.getLogger(name) - - def trace(message: str, *args: typing.Any, **kwargs: typing.Any) -> None: - logger.log(TRACE_LOG_LEVEL, message, *args, **kwargs) - - logger.trace = trace # type: ignore - - return typing.cast(Logger, logger) - - -def url_to_origin(url: URL) -> Origin: - scheme, host, explicit_port = url[:3] - default_port = DEFAULT_PORTS[scheme] - port = default_port if explicit_port is None else explicit_port - return scheme, host, port - - -def origin_to_url_string(origin: Origin) -> str: - scheme, host, explicit_port = origin - port = f":{explicit_port}" if explicit_port != DEFAULT_PORTS[scheme] else "" - return f"{scheme.decode('ascii')}://{host.decode('ascii')}{port}" - - -def exponential_backoff(factor: float) -> typing.Iterator[float]: - yield 0 - for n in itertools.count(2): - yield factor * (2 ** (n - 2)) - def is_socket_readable(sock: typing.Optional[socket.socket]) -> bool: """ Return whether a socket, as identifed by its file descriptor, is readable. - "A socket is readable" means that the read buffer isn't empty, i.e. that calling .recv() on it would immediately return some data. """ @@ -88,7 +17,7 @@ def is_socket_readable(sock: typing.Optional[socket.socket]) -> bool: # descriptor, we treat it as being readable, as if it the next read operation # on it is ready to return the terminating `b""`. sock_fd = None if sock is None else sock.fileno() - if sock_fd is None or sock_fd < 0: + if sock_fd is None or sock_fd < 0: # pragma: nocover return True # The implementation below was stolen from: @@ -97,7 +26,9 @@ def is_socket_readable(sock: typing.Optional[socket.socket]) -> bool: # Use select.select on Windows, and when poll is unavailable and select.poll # everywhere else. (E.g. When eventlet is in use. 
See #327) - if sys.platform == "win32" or getattr(select, "poll", None) is None: + if ( + sys.platform == "win32" or getattr(select, "poll", None) is None + ): # pragma: nocover rready, _, _ = select.select([sock_fd], [], [], 0) return bool(rready) p = select.poll() diff --git a/packages/httpx/__init__.py b/packages/httpx/__init__.py index 4af3904fd..f61112f8b 100644 --- a/packages/httpx/__init__.py +++ b/packages/httpx/__init__.py @@ -1,6 +1,6 @@ from .__version__ import __description__, __title__, __version__ from ._api import delete, get, head, options, patch, post, put, request, stream -from ._auth import Auth, BasicAuth, DigestAuth +from ._auth import Auth, BasicAuth, DigestAuth, NetRCAuth from ._client import USE_CLIENT_DEFAULT, AsyncClient, Client from ._config import Limits, Proxy, Timeout, create_ssl_context from ._content import ByteStream @@ -34,18 +34,30 @@ WriteError, WriteTimeout, ) -from ._models import URL, Cookies, Headers, QueryParams, Request, Response +from ._models import Cookies, Headers, Request, Response from ._status_codes import codes from ._transports.asgi import ASGITransport -from ._transports.base import ( - AsyncBaseTransport, - AsyncByteStream, - BaseTransport, - SyncByteStream, -) +from ._transports.base import AsyncBaseTransport, BaseTransport from ._transports.default import AsyncHTTPTransport, HTTPTransport from ._transports.mock import MockTransport from ._transports.wsgi import WSGITransport +from ._types import AsyncByteStream, SyncByteStream +from ._urls import URL, QueryParams + +try: + from ._main import main +except ImportError: # pragma: no cover + + def main() -> None: # type: ignore + import sys + + print( + "The httpx command line client could not run because the required " + "dependencies were not installed.\nMake sure you've installed " + "everything with: pip install 'httpx[cli]'" + ) + sys.exit(1) + __all__ = [ "__description__", @@ -80,7 +92,9 @@ "InvalidURL", "Limits", "LocalProtocolError", + "main", "MockTransport", + "NetRCAuth", "NetworkError", "options", "patch", diff --git a/packages/httpx/__version__.py b/packages/httpx/__version__.py index cc8296544..bfa421ad6 100644 --- a/packages/httpx/__version__.py +++ b/packages/httpx/__version__.py @@ -1,3 +1,3 @@ __title__ = "httpx" __description__ = "A next generation HTTP client, for Python 3." 
-__version__ = "0.18.2" +__version__ = "0.25.0" diff --git a/packages/httpx/_api.py b/packages/httpx/_api.py index da8185388..571289cf2 100644 --- a/packages/httpx/_api.py +++ b/packages/httpx/_api.py @@ -24,19 +24,19 @@ def request( method: str, url: URLTypes, *, - params: QueryParamTypes = None, - content: RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: typing.Any = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, - auth: AuthTypes = None, - proxies: ProxiesTypes = None, + params: typing.Optional[QueryParamTypes] = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - allow_redirects: bool = True, + follow_redirects: bool = False, verify: VerifyTypes = True, - cert: CertTypes = None, + cert: typing.Optional[CertTypes] = None, trust_env: bool = True, ) -> Response: """ @@ -66,7 +66,7 @@ def request( * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy URLs. * **timeout** - *(optional)* The timeout configuration to use when sending the request. - * **allow_redirects** - *(optional)* Enables or disables HTTP redirects. + * **follow_redirects** - *(optional)* Enables or disables HTTP redirects. * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to verify the identity of requested hosts. Either `True` (default CA bundle), a path to an SSL certificate file, an `ssl.SSLContext`, or `False` @@ -107,7 +107,7 @@ def request( params=params, headers=headers, auth=auth, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, ) @@ -116,19 +116,19 @@ def stream( method: str, url: URLTypes, *, - params: QueryParamTypes = None, - content: RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: typing.Any = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, - auth: AuthTypes = None, - proxies: ProxiesTypes = None, + params: typing.Optional[QueryParamTypes] = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - allow_redirects: bool = True, + follow_redirects: bool = False, verify: VerifyTypes = True, - cert: CertTypes = None, + cert: typing.Optional[CertTypes] = None, trust_env: bool = True, ) -> typing.Iterator[Response]: """ @@ -159,7 +159,7 @@ def stream( params=params, headers=headers, auth=auth, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, ) as response: yield response @@ -167,13 +167,13 @@ def stream( def get( url: URLTypes, *, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, - auth: AuthTypes = None, - proxies: ProxiesTypes = None, - allow_redirects: bool = True, - cert: CertTypes = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = 
None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, verify: VerifyTypes = True, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, @@ -183,8 +183,8 @@ def get( **Parameters**: See `httpx.request`. - Note that the `data`, `files`, and `json` parameters are not available on - this function, as `GET` requests should not include a request body. + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `GET` requests should not include a request body. """ return request( "GET", @@ -194,7 +194,7 @@ def get( cookies=cookies, auth=auth, proxies=proxies, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, cert=cert, verify=verify, timeout=timeout, @@ -205,13 +205,13 @@ def get( def options( url: URLTypes, *, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, - auth: AuthTypes = None, - proxies: ProxiesTypes = None, - allow_redirects: bool = True, - cert: CertTypes = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, verify: VerifyTypes = True, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, @@ -221,8 +221,8 @@ def options( **Parameters**: See `httpx.request`. - Note that the `data`, `files`, and `json` parameters are not available on - this function, as `OPTIONS` requests should not include a request body. + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `OPTIONS` requests should not include a request body. """ return request( "OPTIONS", @@ -232,7 +232,7 @@ def options( cookies=cookies, auth=auth, proxies=proxies, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, cert=cert, verify=verify, timeout=timeout, @@ -243,13 +243,13 @@ def options( def head( url: URLTypes, *, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, - auth: AuthTypes = None, - proxies: ProxiesTypes = None, - allow_redirects: bool = True, - cert: CertTypes = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, verify: VerifyTypes = True, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, @@ -259,8 +259,8 @@ def head( **Parameters**: See `httpx.request`. - Note that the `data`, `files`, and `json` parameters are not available on - this function, as `HEAD` requests should not include a request body. + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `HEAD` requests should not include a request body. 
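Throughout this file `allow_redirects=True` becomes `follow_redirects=False`; the rename also flips the default, so top-level calls no longer follow redirects implicitly. A network-dependent, illustrative sketch:

```python
import httpx

r = httpx.get("http://github.com/")
print(r.status_code)   # e.g. 301; r.next_request holds the prepared follow-up

r = httpx.get("http://github.com/", follow_redirects=True)
print(r.status_code)   # 200 once the redirect chain has been followed
```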
""" return request( "HEAD", @@ -270,7 +270,7 @@ def head( cookies=cookies, auth=auth, proxies=proxies, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, cert=cert, verify=verify, timeout=timeout, @@ -281,17 +281,17 @@ def head( def post( url: URLTypes, *, - content: RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: typing.Any = None, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, - auth: AuthTypes = None, - proxies: ProxiesTypes = None, - allow_redirects: bool = True, - cert: CertTypes = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, verify: VerifyTypes = True, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, @@ -313,7 +313,7 @@ def post( cookies=cookies, auth=auth, proxies=proxies, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, cert=cert, verify=verify, timeout=timeout, @@ -324,17 +324,17 @@ def post( def put( url: URLTypes, *, - content: RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: typing.Any = None, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, - auth: AuthTypes = None, - proxies: ProxiesTypes = None, - allow_redirects: bool = True, - cert: CertTypes = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, verify: VerifyTypes = True, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, @@ -356,7 +356,7 @@ def put( cookies=cookies, auth=auth, proxies=proxies, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, cert=cert, verify=verify, timeout=timeout, @@ -367,17 +367,17 @@ def put( def patch( url: URLTypes, *, - content: RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: typing.Any = None, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, - auth: AuthTypes = None, - proxies: ProxiesTypes = None, - allow_redirects: bool = True, - cert: CertTypes = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, verify: VerifyTypes = True, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, 
trust_env: bool = True, @@ -399,7 +399,7 @@ def patch( cookies=cookies, auth=auth, proxies=proxies, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, cert=cert, verify=verify, timeout=timeout, @@ -410,13 +410,13 @@ def delete( url: URLTypes, *, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, - auth: AuthTypes = None, - proxies: ProxiesTypes = None, - allow_redirects: bool = True, - cert: CertTypes = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, verify: VerifyTypes = True, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, @@ -426,8 +426,8 @@ **Parameters**: See `httpx.request`. - Note that the `data`, `files`, and `json` parameters are not available on - this function, as `DELETE` requests should not include a request body. + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `DELETE` requests should not include a request body. """ return request( "DELETE", url, params=params, headers=headers, cookies=cookies, auth=auth, proxies=proxies, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, cert=cert, verify=verify, timeout=timeout, diff --git a/packages/httpx/_auth.py b/packages/httpx/_auth.py index 343f9cdd1..1d7385d57 100644 --- a/packages/httpx/_auth.py +++ b/packages/httpx/_auth.py @@ -1,4 +1,5 @@ import hashlib +import netrc import os import re import time @@ -10,6 +11,9 @@ from ._models import Request, Response from ._utils import to_bytes, to_str, unquote +if typing.TYPE_CHECKING: # pragma: no cover + from hashlib import _Hash + class Auth: """ @@ -138,8 +142,36 @@ def _build_auth_header( return f"Basic {token}" +class NetRCAuth(Auth): + """ + Use a 'netrc' file to look up basic auth credentials based on the URL host. + """ + + def __init__(self, file: typing.Optional[str] = None): + self._netrc_info = netrc.netrc(file) + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + auth_info = self._netrc_info.authenticators(request.url.host) + if auth_info is None or not auth_info[2]: + # The netrc file did not have authentication credentials for this host. + yield request + else: + # Build a basic auth header with credentials from the netrc file.
+ request.headers["Authorization"] = self._build_auth_header( + username=auth_info[0], password=auth_info[2] + ) + yield request + + def _build_auth_header( + self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] + ) -> str: + userpass = b":".join((to_bytes(username), to_bytes(password))) + token = b64encode(userpass).decode() + return f"Basic {token}" + + class DigestAuth(Auth): - _ALGORITHM_TO_HASH_FUNCTION: typing.Dict[str, typing.Callable] = { + _ALGORITHM_TO_HASH_FUNCTION: typing.Dict[str, typing.Callable[[bytes], "_Hash"]] = { "MD5": hashlib.md5, "MD5-SESS": hashlib.md5, "SHA": hashlib.sha1, @@ -155,8 +187,15 @@ def __init__( ) -> None: self._username = to_bytes(username) self._password = to_bytes(password) + self._last_challenge: typing.Optional[_DigestAuthChallenge] = None + self._nonce_count = 1 def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + if self._last_challenge: + request.headers["Authorization"] = self._build_auth_header( + request, self._last_challenge + ) + response = yield request if response.status_code != 401 or "www-authenticate" not in response.headers: @@ -172,8 +211,12 @@ def auth_flow(self, request: Request) -> typing.Generator[Request, Response, Non # header, then we don't need to build an authenticated request. return - challenge = self._parse_challenge(request, response, auth_header) - request.headers["Authorization"] = self._build_auth_header(request, challenge) + self._last_challenge = self._parse_challenge(request, response, auth_header) + self._nonce_count = 1 + + request.headers["Authorization"] = self._build_auth_header( + request, self._last_challenge + ) yield request def _parse_challenge( @@ -210,7 +253,7 @@ def _parse_challenge( def _build_auth_header( self, request: Request, challenge: "_DigestAuthChallenge" ) -> str: - hash_func = self._ALGORITHM_TO_HASH_FUNCTION[challenge.algorithm] + hash_func = self._ALGORITHM_TO_HASH_FUNCTION[challenge.algorithm.upper()] def digest(data: bytes) -> bytes: return hash_func(data).hexdigest().encode() @@ -222,9 +265,9 @@ def digest(data: bytes) -> bytes: # TODO: implement auth-int HA2 = digest(A2) - nonce_count = 1 # TODO: implement nonce counting - nc_value = b"%08x" % nonce_count - cnonce = self._get_client_nonce(nonce_count, challenge.nonce) + nc_value = b"%08x" % self._nonce_count + cnonce = self._get_client_nonce(self._nonce_count, challenge.nonce) + self._nonce_count += 1 HA1 = digest(A1) if challenge.algorithm.lower().endswith("-sess"): diff --git a/packages/httpx/_client.py b/packages/httpx/_client.py index c6e1efbe6..cb475e020 100644 --- a/packages/httpx/_client.py +++ b/packages/httpx/_client.py @@ -1,13 +1,13 @@ import datetime import enum +import logging import typing import warnings -from contextlib import contextmanager +from contextlib import asynccontextmanager, contextmanager from types import TracebackType from .__version__ import __version__ from ._auth import Auth, BasicAuth, FunctionAuth -from ._compat import asynccontextmanager from ._config import ( DEFAULT_LIMITS, DEFAULT_MAX_REDIRECTS, @@ -23,18 +23,14 @@ TooManyRedirects, request_context, ) -from ._models import URL, Cookies, Headers, QueryParams, Request, Response +from ._models import Cookies, Headers, Request, Response from ._status_codes import codes from ._transports.asgi import ASGITransport -from ._transports.base import ( - AsyncBaseTransport, - AsyncByteStream, - BaseTransport, - SyncByteStream, -) +from ._transports.base import AsyncBaseTransport, BaseTransport from 
._transports.default import AsyncHTTPTransport, HTTPTransport from ._transports.wsgi import WSGITransport from ._types import ( + AsyncByteStream, AuthTypes, CertTypes, CookieTypes, @@ -43,17 +39,19 @@ QueryParamTypes, RequestContent, RequestData, + RequestExtensions, RequestFiles, + SyncByteStream, TimeoutTypes, URLTypes, VerifyTypes, ) +from ._urls import URL, QueryParams from ._utils import ( - NetRCInfo, Timer, URLPattern, get_environment_proxies, - get_logger, + is_https_redirect, same_origin, ) @@ -82,13 +80,11 @@ class UseClientDefault: but it is used internally when a parameter is not included. """ - pass # pragma: nocover - USE_CLIENT_DEFAULT = UseClientDefault() -logger = get_logger(__name__) +logger = logging.getLogger("httpx") USER_AGENT = f"python-httpx/{__version__}" ACCEPT_ENCODING = ", ".join( @@ -156,19 +152,26 @@ async def aclose(self) -> None: await self._stream.aclose() +EventHook = typing.Callable[..., typing.Any] + + class BaseClient: def __init__( self, *, - auth: AuthTypes = None, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + auth: typing.Optional[AuthTypes] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, max_redirects: int = DEFAULT_MAX_REDIRECTS, - event_hooks: typing.Mapping[str, typing.List[typing.Callable]] = None, + event_hooks: typing.Optional[ + typing.Mapping[str, typing.List[EventHook]] + ] = None, base_url: URLTypes = "", trust_env: bool = True, + default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", ): event_hooks = {} if event_hooks is None else event_hooks @@ -179,13 +182,14 @@ def __init__( self.headers = Headers(headers) self._cookies = Cookies(cookies) self._timeout = Timeout(timeout) + self.follow_redirects = follow_redirects self.max_redirects = max_redirects self._event_hooks = { "request": list(event_hooks.get("request", [])), "response": list(event_hooks.get("response", [])), } self._trust_env = trust_env - self._netrc = NetRCInfo() + self._default_encoding = default_encoding self._state = ClientState.UNOPENED @property @@ -233,12 +237,12 @@ def timeout(self, timeout: TimeoutTypes) -> None: self._timeout = Timeout(timeout) @property - def event_hooks(self) -> typing.Dict[str, typing.List[typing.Callable]]: + def event_hooks(self) -> typing.Dict[str, typing.List[EventHook]]: return self._event_hooks @event_hooks.setter def event_hooks( - self, event_hooks: typing.Dict[str, typing.List[typing.Callable]] + self, event_hooks: typing.Dict[str, typing.List[EventHook]] ) -> None: self._event_hooks = { "request": list(event_hooks.get("request", [])), @@ -318,13 +322,15 @@ def build_request( method: str, url: URLTypes, *, - content: RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: typing.Any = None, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, 
) -> Request: """ Build and return a request instance. @@ -341,6 +347,14 @@ def build_request( headers = self._merge_headers(headers) cookies = self._merge_cookies(cookies) params = self._merge_queryparams(params) + extensions = {} if extensions is None else extensions + if "timeout" not in extensions: + timeout = ( + self.timeout + if isinstance(timeout, UseClientDefault) + else Timeout(timeout) + ) + extensions = dict(**extensions, timeout=timeout.as_dict()) return Request( method, url, @@ -351,6 +365,7 @@ def build_request( params=params, headers=headers, cookies=cookies, + extensions=extensions, ) def _merge_url(self, url: URLTypes) -> URL: @@ -362,7 +377,7 @@ def _merge_url(self, url: URLTypes) -> URL: if merge_url.is_relative_url: # To merge URLs we always append to the base URL. To get this # behaviour correct we always ensure the base URL ends in a '/' - # seperator, and strip any leading '/' from the merge URL. + # separator, and strip any leading '/' from the merge URL. # # So, eg... # @@ -376,7 +391,7 @@ def _merge_url(self, url: URLTypes) -> URL: return merge_url def _merge_cookies( - self, cookies: CookieTypes = None + self, cookies: typing.Optional[CookieTypes] = None ) -> typing.Optional[CookieTypes]: """ Merge a cookies argument together with any cookies on the client, @@ -389,7 +404,7 @@ def _merge_cookies( return cookies def _merge_headers( - self, headers: HeaderTypes = None + self, headers: typing.Optional[HeaderTypes] = None ) -> typing.Optional[HeaderTypes]: """ Merge a headers argument together with any headers on the client, @@ -400,7 +415,7 @@ def _merge_headers( return merged_headers def _merge_queryparams( - self, params: QueryParamTypes = None + self, params: typing.Optional[QueryParamTypes] = None ) -> typing.Optional[QueryParamTypes]: """ Merge a queryparams argument together with any queryparams on the client, @@ -408,11 +423,10 @@ def _merge_queryparams( """ if params or self.params: merged_queryparams = QueryParams(self.params) - merged_queryparams = merged_queryparams.merge(params) - return merged_queryparams + return merged_queryparams.merge(params) return params - def _build_auth(self, auth: AuthTypes) -> typing.Optional[Auth]: + def _build_auth(self, auth: typing.Optional[AuthTypes]) -> typing.Optional[Auth]: if auth is None: return None elif isinstance(auth, tuple): @@ -427,7 +441,7 @@ def _build_auth(self, auth: AuthTypes) -> typing.Optional[Auth]: def _build_request_auth( self, request: Request, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, ) -> Auth: auth = ( self._auth if isinstance(auth, UseClientDefault) else self._build_auth(auth) @@ -440,11 +454,6 @@ def _build_request_auth( if username or password: return BasicAuth(username=username, password=password) - if self.trust_env and "Authorization" not in request.headers: - credentials = self._netrc.get_credentials(request.url.host) - if credentials is not None: - return BasicAuth(username=credentials[0], password=credentials[1]) - return Auth() def _build_redirect_request(self, request: Request, response: Response) -> Request: @@ -458,7 +467,12 @@ def _build_redirect_request(self, request: Request, response: Response) -> Reque stream = self._redirect_stream(request, method) cookies = Cookies(self.cookies) return Request( - method=method, url=url, headers=headers, cookies=cookies, stream=stream + method=method, + url=url, + headers=headers, + cookies=cookies, + stream=stream, + 
extensions=request.extensions, ) def _redirect_method(self, request: Request, response: Response) -> str: @@ -520,9 +534,10 @@ def _redirect_headers(self, request: Request, url: URL, method: str) -> Headers: headers = Headers(request.headers) if not same_origin(url, request.url): - # Strip Authorization headers when responses are redirected away from - # the origin. - headers.pop("Authorization", None) + if not is_https_redirect(request.url, url): + # Strip Authorization headers when responses are redirected + # away from the origin. (Except for direct HTTP to HTTPS redirects.) + headers.pop("Authorization", None) # Update the Host header. headers["Host"] = url.netloc.decode("ascii") @@ -555,6 +570,8 @@ class Client(BaseClient): """ An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc. + It can be shared between threads. + Usage: ```python @@ -595,29 +612,36 @@ class Client(BaseClient): rather than sending actual network requests. * **trust_env** - *(optional)* Enables or disables usage of environment variables for configuration. + * **default_encoding** - *(optional)* The default encoding to use for decoding + response text, if no charset information is included in a response Content-Type + header. Set to a callable for automatic character set detection. Default: "utf-8". """ def __init__( self, *, - auth: AuthTypes = None, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + auth: typing.Optional[AuthTypes] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, verify: VerifyTypes = True, - cert: CertTypes = None, + cert: typing.Optional[CertTypes] = None, http1: bool = True, http2: bool = False, - proxies: ProxiesTypes = None, - mounts: typing.Mapping[str, BaseTransport] = None, + proxies: typing.Optional[ProxiesTypes] = None, + mounts: typing.Optional[typing.Mapping[str, BaseTransport]] = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, limits: Limits = DEFAULT_LIMITS, max_redirects: int = DEFAULT_MAX_REDIRECTS, - event_hooks: typing.Mapping[str, typing.List[typing.Callable]] = None, + event_hooks: typing.Optional[ + typing.Mapping[str, typing.List[EventHook]] + ] = None, base_url: URLTypes = "", - transport: BaseTransport = None, - app: typing.Callable = None, + transport: typing.Optional[BaseTransport] = None, + app: typing.Optional[typing.Callable[..., typing.Any]] = None, trust_env: bool = True, + default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", ): super().__init__( auth=auth, @@ -625,16 +649,18 @@ def __init__( headers=headers, cookies=cookies, timeout=timeout, + follow_redirects=follow_redirects, max_redirects=max_redirects, event_hooks=event_hooks, base_url=base_url, trust_env=trust_env, + default_encoding=default_encoding, ) if http2: try: import h2 # noqa - except ImportError: # pragma: nocover + except ImportError: # pragma: no cover raise ImportError( "Using http2=True, but the 'h2' package is not installed. " "Make sure to install httpx using `pip install httpx[http2]`." 
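Putting the client-level changes together: `follow_redirects` moves onto the client, `default_encoding` may be a `bytes -> str` callable, and `http2=True` now raises a clear `ImportError` without the `h2` extra. A hedged construction sketch (URL illustrative):

```python
import httpx

client = httpx.Client(
    follow_redirects=True,     # client-wide default, overridable per call
    default_encoding="utf-8",  # or a callable, e.g. a charset detector
    http2=False,               # True requires `pip install httpx[http2]`
)
try:
    r = client.get("https://example.com/")
    print(r.http_version, r.encoding)
finally:
    client.close()
```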
@@ -677,12 +703,12 @@ def __init__( def _init_transport( self, verify: VerifyTypes = True, - cert: CertTypes = None, + cert: typing.Optional[CertTypes] = None, http1: bool = True, http2: bool = False, limits: Limits = DEFAULT_LIMITS, - transport: BaseTransport = None, - app: typing.Callable = None, + transport: typing.Optional[BaseTransport] = None, + app: typing.Optional[typing.Callable[..., typing.Any]] = None, trust_env: bool = True, ) -> BaseTransport: if transport is not None: @@ -704,7 +730,7 @@ def _init_proxy_transport( self, proxy: Proxy, verify: VerifyTypes = True, - cert: CertTypes = None, + cert: typing.Optional[CertTypes] = None, http1: bool = True, http2: bool = False, limits: Limits = DEFAULT_LIMITS, @@ -736,16 +762,17 @@ def request( method: str, url: URLTypes, *, - content: RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: typing.Any = None, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> Response: """ Build and send a request. 
@@ -781,10 +808,10 @@ def request( params=params, headers=headers, cookies=cookies, + timeout=timeout, + extensions=extensions, ) - return self.send( - request, auth=auth, allow_redirects=allow_redirects, timeout=timeout - ) + return self.send(request, auth=auth, follow_redirects=follow_redirects) @contextmanager def stream( @@ -792,16 +819,17 @@ def stream( method: str, url: URLTypes, *, - content: RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: typing.Any = None, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> typing.Iterator[Response]: """ Alternative to `httpx.request()` that streams the response body @@ -823,12 +851,13 @@ def stream( params=params, headers=headers, cookies=cookies, + timeout=timeout, + extensions=extensions, ) response = self.send( request=request, auth=auth, - allow_redirects=allow_redirects, - timeout=timeout, + follow_redirects=follow_redirects, stream=True, ) try: @@ -841,9 +870,8 @@ def send( request: Request, *, stream: bool = False, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, ) -> Response: """ Send a request. 
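`stream()` gets the same treatment: `timeout` and `extensions` are baked into the request at build time, and only `auth`/`follow_redirects` flow through `send()`. Typical streaming usage is unchanged:

```python
import httpx

with httpx.Client() as client:
    with client.stream("GET", "https://www.example.org/") as response:
        for chunk in response.iter_bytes():
            ...  # process the body incrementally, without buffering it all
```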
@@ -862,8 +890,10 @@ def send( raise RuntimeError("Cannot send a request, as the client has been closed.") self._state = ClientState.OPENED - timeout = ( - self.timeout if isinstance(timeout, UseClientDefault) else Timeout(timeout) + follow_redirects = ( + self.follow_redirects + if isinstance(follow_redirects, UseClientDefault) + else follow_redirects ) auth = self._build_request_auth(request, auth) @@ -871,20 +901,16 @@ def send( response = self._send_handling_auth( request, auth=auth, - timeout=timeout, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, history=[], ) try: if not stream: response.read() - for hook in self._event_hooks["response"]: - hook(response) - return response - except Exception as exc: + except BaseException as exc: response.close() raise exc @@ -892,22 +918,17 @@ def _send_handling_auth( self, request: Request, auth: Auth, - timeout: Timeout, - allow_redirects: bool, + follow_redirects: bool, history: typing.List[Response], ) -> Response: auth_flow = auth.sync_auth_flow(request) try: request = next(auth_flow) - for hook in self._event_hooks["request"]: - hook(request) - while True: response = self._send_handling_redirects( request, - timeout=timeout, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, history=history, ) try: @@ -921,7 +942,7 @@ def _send_handling_auth( request = next_request history.append(response) - except Exception as exc: + except BaseException as exc: response.close() raise exc finally: @@ -930,8 +951,7 @@ def _send_handling_auth( def _send_handling_redirects( self, request: Request, - timeout: Timeout, - allow_redirects: bool, + follow_redirects: bool, history: typing.List[Response], ) -> Response: while True: @@ -940,27 +960,32 @@ def _send_handling_redirects( "Exceeded maximum allowed redirects.", request=request ) - response = self._send_single_request(request, timeout) + for hook in self._event_hooks["request"]: + hook(request) + + response = self._send_single_request(request) try: + for hook in self._event_hooks["response"]: + hook(response) response.history = list(history) - if not response.is_redirect: + if not response.has_redirect_location: return response request = self._build_redirect_request(request, response) history = history + [response] - if allow_redirects: + if follow_redirects: response.read() else: response.next_request = request return response - except Exception as exc: + except BaseException as exc: response.close() raise exc - def _send_single_request(self, request: Request, timeout: Timeout) -> Response: + def _send_single_request(self, request: Request) -> Response: """ Sends a single request, without handling any redirections. 
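As the hunks above show, the event hooks move out of `send()` and `_send_handling_auth()` and into the redirect loop, so the `request` and `response` hooks now fire once per hop of a redirect chain, with response hooks running before the redirect is followed. A small illustration of the hook API:

```python
import httpx

def log_request(request: httpx.Request) -> None:
    print(f">>> {request.method} {request.url}")

def log_response(response: httpx.Response) -> None:
    print(f"<<< {response.status_code} {response.url}")

client = httpx.Client(
    event_hooks={"request": [log_request], "response": [log_response]},
    follow_redirects=True,
)
# With the patched loop, each hop of a redirect chain triggers both hooks.
```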
""" @@ -974,28 +999,25 @@ def _send_single_request(self, request: Request, timeout: Timeout) -> Response: ) with request_context(request=request): - (status_code, headers, stream, extensions) = transport.handle_request( - request.method.encode(), - request.url.raw, - headers=request.headers.raw, - stream=request.stream, - extensions={"timeout": timeout.as_dict()}, - ) + response = transport.handle_request(request) - response = Response( - status_code, - headers=headers, - stream=stream, - extensions=extensions, - request=request, - ) + assert isinstance(response.stream, SyncByteStream) - response.stream = BoundSyncStream(stream, response=response, timer=timer) + response.request = request + response.stream = BoundSyncStream( + response.stream, response=response, timer=timer + ) self.cookies.extract_cookies(response) - - status = f"{response.status_code} {response.reason_phrase}" - response_line = f"{response.http_version} {status}" - logger.debug(f'HTTP Request: {request.method} {request.url} "{response_line}"') + response.default_encoding = self._default_encoding + + logger.info( + 'HTTP Request: %s %s "%s %d %s"', + request.method, + request.url, + response.http_version, + response.status_code, + response.reason_phrase, + ) return response @@ -1003,12 +1025,13 @@ def get( self, url: URLTypes, *, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> Response: """ Send a `GET` request. @@ -1022,20 +1045,22 @@ def get( headers=headers, cookies=cookies, auth=auth, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, timeout=timeout, + extensions=extensions, ) def options( self, url: URLTypes, *, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> Response: """ Send an `OPTIONS` request. 
@@ -1049,20 +1074,22 @@ def options( headers=headers, cookies=cookies, auth=auth, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, timeout=timeout, + extensions=extensions, ) def head( self, url: URLTypes, *, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> Response: """ Send a `HEAD` request. @@ -1076,24 +1103,26 @@ def head( headers=headers, cookies=cookies, auth=auth, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, timeout=timeout, + extensions=extensions, ) def post( self, url: URLTypes, *, - content: RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: typing.Any = None, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> Response: """ Send a `POST` request. @@ -1111,24 +1140,26 @@ def post( headers=headers, cookies=cookies, auth=auth, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, timeout=timeout, + extensions=extensions, ) def put( self, url: URLTypes, *, - content: RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: typing.Any = None, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> Response: """ Send a `PUT` request. 
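The body-carrying helpers (`post`, `put`, `patch`) all share the same four mutually exclusive body styles. A one-line reminder of the most common one:

```python
import httpx

with httpx.Client() as client:
    # Exactly one of 'content', 'data', 'files' or 'json' per request.
    response = client.post("https://www.example.org/submit", json={"answer": 42})
```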
@@ -1146,24 +1177,26 @@ def put( headers=headers, cookies=cookies, auth=auth, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, timeout=timeout, + extensions=extensions, ) def patch( self, url: URLTypes, *, - content: RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: typing.Any = None, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> Response: """ Send a `PATCH` request. @@ -1181,20 +1214,22 @@ def patch( headers=headers, cookies=cookies, auth=auth, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, timeout=timeout, + extensions=extensions, ) def delete( self, url: URLTypes, *, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> Response: """ Send a `DELETE` request. @@ -1208,8 +1243,9 @@ def delete( headers=headers, cookies=cookies, auth=auth, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, timeout=timeout, + extensions=extensions, ) def close(self) -> None: @@ -1225,6 +1261,13 @@ def close(self) -> None: transport.close() def __enter__(self: T) -> T: + if self._state != ClientState.UNOPENED: + msg = { + ClientState.OPENED: "Cannot open a client instance more than once.", + ClientState.CLOSED: "Cannot reopen a client instance, once it has been closed.", + }[self._state] + raise RuntimeError(msg) + self._state = ClientState.OPENED self._transport.__enter__() @@ -1235,9 +1278,9 @@ def __enter__(self: T) -> T: def __exit__( self, - exc_type: typing.Type[BaseException] = None, - exc_value: BaseException = None, - traceback: TracebackType = None, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[TracebackType] = None, ) -> None: self._state = ClientState.CLOSED @@ -1246,13 +1289,6 @@ def __exit__( if transport is not None: transport.__exit__(exc_type, exc_value, traceback) - def __del__(self) -> None: - # We use 'getattr' here, to manage the case where '__del__()' is called - # on a partically initiallized instance that raised an exception during - # the call to '__init__()'. - if getattr(self, "_state", None) == ClientState.OPENED: # noqa: B009 - self.close() - class AsyncClient(BaseClient): """ @@ -1278,7 +1314,8 @@ class AsyncClient(BaseClient): sending requests. 
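The `__enter__` hunk above replaces the old garbage-collection-time `__del__` cleanup with strict lifecycle checks: a client may be opened exactly once. A quick demonstration of the new failure mode:

```python
import httpx

client = httpx.Client()
with client:
    ...            # first use is fine

try:
    with client:   # reopening after close is now an explicit error
        ...
except RuntimeError as exc:
    print(exc)     # Cannot reopen a client instance, once it has been closed.
```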
* **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to verify the identity of requested hosts. Either `True` (default CA bundle), - a path to an SSL certificate file, or `False` (disable verification). + a path to an SSL certificate file, an `ssl.SSLContext`, or `False` + (which will disable verification). * **cert** - *(optional)* An SSL certificate used by the requested host to authenticate the client. Either a path to an SSL certificate file, or two-tuple of (certificate file, key file), or a three-tuple of (certificate @@ -1300,29 +1337,36 @@ class AsyncClient(BaseClient): rather than sending actual network requests. * **trust_env** - *(optional)* Enables or disables usage of environment variables for configuration. + * **default_encoding** - *(optional)* The default encoding to use for decoding + response text, if no charset information is included in a response Content-Type + header. Set to a callable for automatic character set detection. Default: "utf-8". """ def __init__( self, *, - auth: AuthTypes = None, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + auth: typing.Optional[AuthTypes] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, verify: VerifyTypes = True, - cert: CertTypes = None, + cert: typing.Optional[CertTypes] = None, http1: bool = True, http2: bool = False, - proxies: ProxiesTypes = None, - mounts: typing.Mapping[str, AsyncBaseTransport] = None, + proxies: typing.Optional[ProxiesTypes] = None, + mounts: typing.Optional[typing.Mapping[str, AsyncBaseTransport]] = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, limits: Limits = DEFAULT_LIMITS, max_redirects: int = DEFAULT_MAX_REDIRECTS, - event_hooks: typing.Mapping[str, typing.List[typing.Callable]] = None, + event_hooks: typing.Optional[ + typing.Mapping[str, typing.List[typing.Callable[..., typing.Any]]] + ] = None, base_url: URLTypes = "", - transport: AsyncBaseTransport = None, - app: typing.Callable = None, + transport: typing.Optional[AsyncBaseTransport] = None, + app: typing.Optional[typing.Callable[..., typing.Any]] = None, trust_env: bool = True, + default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", ): super().__init__( auth=auth, @@ -1330,16 +1374,18 @@ def __init__( headers=headers, cookies=cookies, timeout=timeout, + follow_redirects=follow_redirects, max_redirects=max_redirects, event_hooks=event_hooks, base_url=base_url, trust_env=trust_env, + default_encoding=default_encoding, ) if http2: try: import h2 # noqa - except ImportError: # pragma: nocover + except ImportError: # pragma: no cover raise ImportError( "Using http2=True, but the 'h2' package is not installed. " "Make sure to install httpx using `pip install httpx[http2]`." 
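The new `default_encoding` parameter documented above accepts either an encoding name or a callable that receives the raw body bytes. A sketch of the detection-callable form, assuming the third-party `chardet` package is installed:

```python
import httpx
import chardet  # third-party detector, assumed available for this sketch

def autodetect(content: bytes) -> str:
    # Called with the raw body when Content-Type carries no charset.
    return chardet.detect(content).get("encoding") or "utf-8"

client = httpx.AsyncClient(default_encoding=autodetect)
```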
@@ -1382,12 +1428,12 @@ def __init__( def _init_transport( self, verify: VerifyTypes = True, - cert: CertTypes = None, + cert: typing.Optional[CertTypes] = None, http1: bool = True, http2: bool = False, limits: Limits = DEFAULT_LIMITS, - transport: AsyncBaseTransport = None, - app: typing.Callable = None, + transport: typing.Optional[AsyncBaseTransport] = None, + app: typing.Optional[typing.Callable[..., typing.Any]] = None, trust_env: bool = True, ) -> AsyncBaseTransport: if transport is not None: @@ -1409,7 +1455,7 @@ def _init_proxy_transport( self, proxy: Proxy, verify: VerifyTypes = True, - cert: CertTypes = None, + cert: typing.Optional[CertTypes] = None, http1: bool = True, http2: bool = False, limits: Limits = DEFAULT_LIMITS, @@ -1440,16 +1486,17 @@ async def request( method: str, url: URLTypes, *, - content: RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: typing.Any = None, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> Response: """ Build and send a request. 
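The async transport setup mirrors the sync client: an explicit `AsyncBaseTransport` can replace the one built from `verify`/`cert`/`http2`/`limits`, and `mounts` routes requests to transports by URL prefix. An illustrative configuration:

```python
import httpx

# 'mounts' maps URL patterns to transports, so different prefixes can
# get different connection behaviour (retry counts here are arbitrary).
client = httpx.AsyncClient(
    mounts={
        "http://": httpx.AsyncHTTPTransport(retries=1),
        "https://": httpx.AsyncHTTPTransport(http2=True),
    }
)
```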
@@ -1477,11 +1524,10 @@ async def request( params=params, headers=headers, cookies=cookies, + timeout=timeout, + extensions=extensions, ) - response = await self.send( - request, auth=auth, allow_redirects=allow_redirects, timeout=timeout - ) - return response + return await self.send(request, auth=auth, follow_redirects=follow_redirects) @asynccontextmanager async def stream( @@ -1489,16 +1535,17 @@ async def stream( method: str, url: URLTypes, *, - content: RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: typing.Any = None, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> typing.AsyncIterator[Response]: """ Alternative to `httpx.request()` that streams the response body @@ -1520,12 +1567,13 @@ async def stream( params=params, headers=headers, cookies=cookies, + timeout=timeout, + extensions=extensions, ) response = await self.send( request=request, auth=auth, - allow_redirects=allow_redirects, - timeout=timeout, + follow_redirects=follow_redirects, stream=True, ) try: @@ -1538,9 +1586,8 @@ async def send( request: Request, *, stream: bool = False, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, ) -> Response: """ Send a request. 
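The async `request()`/`stream()` changes are symmetric with the sync client. Typical usage under the new defaults:

```python
import asyncio
import httpx

async def main() -> None:
    async with httpx.AsyncClient(follow_redirects=True) as client:
        async with client.stream("GET", "https://www.example.org/") as response:
            async for chunk in response.aiter_bytes():
                ...  # consume the body incrementally

asyncio.run(main())
```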
@@ -1559,8 +1606,10 @@ async def send( raise RuntimeError("Cannot send a request, as the client has been closed.") self._state = ClientState.OPENED - timeout = ( - self.timeout if isinstance(timeout, UseClientDefault) else Timeout(timeout) + follow_redirects = ( + self.follow_redirects + if isinstance(follow_redirects, UseClientDefault) + else follow_redirects ) auth = self._build_request_auth(request, auth) @@ -1568,20 +1617,16 @@ async def send( response = await self._send_handling_auth( request, auth=auth, - timeout=timeout, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, history=[], ) try: if not stream: await response.aread() - for hook in self._event_hooks["response"]: - await hook(response) - return response - except Exception as exc: + except BaseException as exc: # pragma: no cover await response.aclose() raise exc @@ -1589,22 +1634,17 @@ async def _send_handling_auth( self, request: Request, auth: Auth, - timeout: Timeout, - allow_redirects: bool, + follow_redirects: bool, history: typing.List[Response], ) -> Response: auth_flow = auth.async_auth_flow(request) try: request = await auth_flow.__anext__() - for hook in self._event_hooks["request"]: - await hook(request) - while True: response = await self._send_handling_redirects( request, - timeout=timeout, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, history=history, ) try: @@ -1618,7 +1658,7 @@ async def _send_handling_auth( request = next_request history.append(response) - except Exception as exc: + except BaseException as exc: await response.aclose() raise exc finally: @@ -1627,8 +1667,7 @@ async def _send_handling_auth( async def _send_handling_redirects( self, request: Request, - timeout: Timeout, - allow_redirects: bool, + follow_redirects: bool, history: typing.List[Response], ) -> Response: while True: @@ -1637,29 +1676,33 @@ async def _send_handling_redirects( "Exceeded maximum allowed redirects.", request=request ) - response = await self._send_single_request(request, timeout) + for hook in self._event_hooks["request"]: + await hook(request) + + response = await self._send_single_request(request) try: + for hook in self._event_hooks["response"]: + await hook(response) + response.history = list(history) - if not response.is_redirect: + if not response.has_redirect_location: return response request = self._build_redirect_request(request, response) history = history + [response] - if allow_redirects: + if follow_redirects: await response.aread() else: response.next_request = request return response - except Exception as exc: + except BaseException as exc: await response.aclose() raise exc - async def _send_single_request( - self, request: Request, timeout: Timeout - ) -> Response: + async def _send_single_request(self, request: Request) -> Response: """ Sends a single request, without handling any redirections. 
""" @@ -1673,33 +1716,24 @@ async def _send_single_request( ) with request_context(request=request): - ( - status_code, - headers, - stream, - extensions, - ) = await transport.handle_async_request( - request.method.encode(), - request.url.raw, - headers=request.headers.raw, - stream=request.stream, - extensions={"timeout": timeout.as_dict()}, - ) + response = await transport.handle_async_request(request) - response = Response( - status_code, - headers=headers, - stream=stream, - extensions=extensions, - request=request, + assert isinstance(response.stream, AsyncByteStream) + response.request = request + response.stream = BoundAsyncStream( + response.stream, response=response, timer=timer ) - - response.stream = BoundAsyncStream(stream, response=response, timer=timer) self.cookies.extract_cookies(response) - - status = f"{response.status_code} {response.reason_phrase}" - response_line = f"{response.http_version} {status}" - logger.debug(f'HTTP Request: {request.method} {request.url} "{response_line}"') + response.default_encoding = self._default_encoding + + logger.info( + 'HTTP Request: %s %s "%s %d %s"', + request.method, + request.url, + response.http_version, + response.status_code, + response.reason_phrase, + ) return response @@ -1707,12 +1741,13 @@ async def get( self, url: URLTypes, *, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> Response: """ Send a `GET` request. @@ -1726,20 +1761,22 @@ async def get( headers=headers, cookies=cookies, auth=auth, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, timeout=timeout, + extensions=extensions, ) async def options( self, url: URLTypes, *, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> Response: """ Send an `OPTIONS` request. 
@@ -1753,20 +1790,22 @@ async def options( headers=headers, cookies=cookies, auth=auth, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, timeout=timeout, + extensions=extensions, ) async def head( self, url: URLTypes, *, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> Response: """ Send a `HEAD` request. @@ -1780,24 +1819,26 @@ async def head( headers=headers, cookies=cookies, auth=auth, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, timeout=timeout, + extensions=extensions, ) async def post( self, url: URLTypes, *, - content: RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: typing.Any = None, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> Response: """ Send a `POST` request. @@ -1815,24 +1856,26 @@ async def post( headers=headers, cookies=cookies, auth=auth, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, timeout=timeout, + extensions=extensions, ) async def put( self, url: URLTypes, *, - content: RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: typing.Any = None, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> Response: """ Send a `PUT` request. 
@@ -1850,24 +1893,26 @@ async def put( headers=headers, cookies=cookies, auth=auth, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, timeout=timeout, + extensions=extensions, ) async def patch( self, url: URLTypes, *, - content: RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: typing.Any = None, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> Response: """ Send a `PATCH` request. @@ -1885,20 +1930,22 @@ async def patch( headers=headers, cookies=cookies, auth=auth, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, timeout=timeout, + extensions=extensions, ) async def delete( self, url: URLTypes, *, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - allow_redirects: bool = True, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, ) -> Response: """ Send a `DELETE` request. @@ -1912,8 +1959,9 @@ async def delete( headers=headers, cookies=cookies, auth=auth, - allow_redirects=allow_redirects, + follow_redirects=follow_redirects, timeout=timeout, + extensions=extensions, ) async def aclose(self) -> None: @@ -1929,6 +1977,13 @@ async def aclose(self) -> None: await proxy.aclose() async def __aenter__(self: U) -> U: + if self._state != ClientState.UNOPENED: + msg = { + ClientState.OPENED: "Cannot open a client instance more than once.", + ClientState.CLOSED: "Cannot reopen a client instance, once it has been closed.", + }[self._state] + raise RuntimeError(msg) + self._state = ClientState.OPENED await self._transport.__aenter__() @@ -1939,9 +1994,9 @@ async def __aenter__(self: U) -> U: async def __aexit__( self, - exc_type: typing.Type[BaseException] = None, - exc_value: BaseException = None, - traceback: TracebackType = None, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[TracebackType] = None, ) -> None: self._state = ClientState.CLOSED @@ -1949,34 +2004,3 @@ async def __aexit__( for proxy in self._mounts.values(): if proxy is not None: await proxy.__aexit__(exc_type, exc_value, traceback) - - def __del__(self) -> None: - # We use 'getattr' here, to manage the case where '__del__()' is called - # on a partically initiallized instance that raised an exception during - # the call to '__init__()'. 
- if getattr(self, "_state", None) == ClientState.OPENED: # noqa: B009 - # Unlike the sync case, we cannot silently close the client when - # it is garbage collected, because `.aclose()` is an async operation, - # but `__del__` is not. - # - # For this reason we require explicit close management for - # `AsyncClient`, and issue a warning on unclosed clients. - # - # The context managed style is usually preferable, because it neatly - # ensures proper resource cleanup: - # - # async with httpx.AsyncClient() as client: - # ... - # - # However, an explicit call to `aclose()` is also sufficient: - # - # client = httpx.AsyncClient() - # try: - # ... - # finally: - # await client.aclose() - warnings.warn( - f"Unclosed {self!r}. " - "See https://www.python-httpx.org/async/#opening-and-closing-clients " - "for details." - ) diff --git a/packages/httpx/_compat.py b/packages/httpx/_compat.py index 98a3e37b8..a271c6b80 100644 --- a/packages/httpx/_compat.py +++ b/packages/httpx/_compat.py @@ -5,21 +5,39 @@ import ssl import sys -# `contextlib.asynccontextmanager` exists from Python 3.7 onwards. -# For 3.6 we require the `async_generator` package for a backported version. +# Brotli support is optional +# The C bindings in `brotli` are recommended for CPython. +# The CFFI bindings in `brotlicffi` are recommended for PyPy and everything else. try: - from contextlib import asynccontextmanager # type: ignore -except ImportError: - from async_generator import asynccontextmanager # type: ignore # noqa + import brotlicffi as brotli +except ImportError: # pragma: no cover + try: + import brotli + except ImportError: + brotli = None +if sys.version_info >= (3, 10) or ( + sys.version_info >= (3, 8) and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0, 7) +): -def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: - if sys.version_info >= (3, 10): + def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: + # The OP_NO_SSL* and OP_NO_TLS* become deprecated in favor of + # 'SSLContext.minimum_version' from Python 3.7 onwards, however + # this attribute is not available unless the ssl module is compiled + # with OpenSSL 1.1.0g or newer. + # https://docs.python.org/3.10/library/ssl.html#ssl.SSLContext.minimum_version + # https://docs.python.org/3.7/library/ssl.html#ssl.SSLContext.minimum_version context.minimum_version = ssl.TLSVersion.TLSv1_2 - else: - # These become deprecated in favor of 'context.minimum_version' - # from Python 3.10 onwards. + +else: + + def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: + # If 'minimum_version' isn't available, we configure these options with + # the older deprecated variants. 
context.options |= ssl.OP_NO_SSLv2 context.options |= ssl.OP_NO_SSLv3 context.options |= ssl.OP_NO_TLSv1 context.options |= ssl.OP_NO_TLSv1_1 + + +__all__ = ["brotli", "set_minimum_tls_version_1_2"] diff --git a/packages/httpx/_config.py b/packages/httpx/_config.py index 9d29f9f2f..39d81a20a 100644 --- a/packages/httpx/_config.py +++ b/packages/httpx/_config.py @@ -1,15 +1,17 @@ +import logging import os import ssl +import sys import typing -from base64 import b64encode from pathlib import Path import certifi from ._compat import set_minimum_tls_version_1_2 -from ._models import URL, Headers +from ._models import Headers from ._types import CertTypes, HeaderTypes, TimeoutTypes, URLTypes, VerifyTypes -from ._utils import get_ca_bundle_from_env, get_logger +from ._urls import URL +from ._utils import get_ca_bundle_from_env DEFAULT_CIPHERS = ":".join( [ @@ -31,18 +33,18 @@ ) -logger = get_logger(__name__) +logger = logging.getLogger("httpx") class UnsetType: - pass # pragma: nocover + pass # pragma: no cover UNSET = UnsetType() def create_ssl_context( - cert: CertTypes = None, + cert: typing.Optional[CertTypes] = None, verify: VerifyTypes = True, trust_env: bool = True, http2: bool = False, @@ -62,7 +64,7 @@ class SSLConfig: def __init__( self, *, - cert: CertTypes = None, + cert: typing.Optional[CertTypes] = None, verify: VerifyTypes = True, trust_env: bool = True, http2: bool = False, @@ -74,12 +76,12 @@ def __init__( self.ssl_context = self.load_ssl_context() def load_ssl_context(self) -> ssl.SSLContext: - logger.trace( - f"load_ssl_context " - f"verify={self.verify!r} " - f"cert={self.cert!r} " - f"trust_env={self.trust_env!r} " - f"http2={self.http2!r}" + logger.debug( + "load_ssl_context verify=%r cert=%r trust_env=%r http2=%r", + self.verify, + self.cert, + self.trust_env, + self.http2, ) if self.verify: @@ -126,24 +128,27 @@ def load_ssl_context_verify(self) -> ssl.SSLContext: # Signal to server support for PHA in TLS 1.3. Raises an # AttributeError if only read-only access is implemented. - try: - context.post_handshake_auth = True # type: ignore - except AttributeError: # pragma: nocover - pass + if sys.version_info >= (3, 8): # pragma: no cover + try: + context.post_handshake_auth = True + except AttributeError: # pragma: no cover + pass # Disable using 'commonName' for SSLContext.check_hostname # when the 'subjectAltName' extension isn't available. 
try: - context.hostname_checks_common_name = False # type: ignore - except AttributeError: # pragma: nocover + context.hostname_checks_common_name = False + except AttributeError: # pragma: no cover pass if ca_bundle_path.is_file(): - logger.trace(f"load_verify_locations cafile={ca_bundle_path!s}") - context.load_verify_locations(cafile=str(ca_bundle_path)) + cafile = str(ca_bundle_path) + logger.debug("load_verify_locations cafile=%r", cafile) + context.load_verify_locations(cafile=cafile) elif ca_bundle_path.is_dir(): - logger.trace(f"load_verify_locations capath={ca_bundle_path!s}") - context.load_verify_locations(capath=str(ca_bundle_path)) + capath = str(ca_bundle_path) + logger.debug("load_verify_locations capath=%r", capath) + context.load_verify_locations(capath=capath) self._load_client_certs(context) @@ -163,10 +168,10 @@ def _create_default_ssl_context(self) -> ssl.SSLContext: alpn_idents = ["http/1.1", "h2"] if self.http2 else ["http/1.1"] context.set_alpn_protocols(alpn_idents) - if hasattr(context, "keylog_filename"): # pragma: nocover (Available in 3.8+) + if sys.version_info >= (3, 8): # pragma: no cover keylogfile = os.environ.get("SSLKEYLOGFILE") if keylogfile and self.trust_env: - context.keylog_filename = keylogfile # type: ignore + context.keylog_filename = keylogfile return context @@ -285,13 +290,14 @@ class Limits: * **max_keepalive_connections** - Allow the connection pool to maintain keep-alive connections below this point. Should be less than or equal to `max_connections`. + * **keepalive_expiry** - Time limit on idle keep-alive connections in seconds. """ def __init__( self, *, - max_connections: int = None, - max_keepalive_connections: int = None, + max_connections: typing.Optional[int] = None, + max_keepalive_connections: typing.Optional[int] = None, keepalive_expiry: typing.Optional[float] = 5.0, ): self.max_connections = max_connections @@ -317,41 +323,48 @@ def __repr__(self) -> str: class Proxy: def __init__( - self, url: URLTypes, *, headers: HeaderTypes = None, mode: str = "DEFAULT" + self, + url: URLTypes, + *, + ssl_context: typing.Optional[ssl.SSLContext] = None, + auth: typing.Optional[typing.Tuple[str, str]] = None, + headers: typing.Optional[HeaderTypes] = None, ): url = URL(url) headers = Headers(headers) - if url.scheme not in ("http", "https"): + if url.scheme not in ("http", "https", "socks5"): raise ValueError(f"Unknown scheme for proxy URL {url!r}") - if mode not in ("DEFAULT", "FORWARD_ONLY", "TUNNEL_ONLY"): - raise ValueError(f"Unknown proxy mode {mode!r}") if url.username or url.password: - headers.setdefault( - "Proxy-Authorization", - self._build_auth_header(url.username, url.password), - ) - # Remove userinfo from the URL authority, e.g.: - # 'username:password@proxy_host:proxy_port' -> 'proxy_host:proxy_port' + # Remove any auth credentials from the URL. + auth = (url.username, url.password) url = url.copy_with(username=None, password=None) self.url = url + self.auth = auth self.headers = headers - self.mode = mode + self.ssl_context = ssl_context - def _build_auth_header(self, username: str, password: str) -> str: - userpass = (username.encode("utf-8"), password.encode("utf-8")) - token = b64encode(b":".join(userpass)).decode() - return f"Basic {token}" - - def __repr__(self) -> str: + @property + def raw_auth(self) -> typing.Optional[typing.Tuple[bytes, bytes]]: + # The proxy authentication as raw bytes. 
return ( - f"Proxy(url={str(self.url)!r}, " - f"headers={dict(self.headers)!r}, " - f"mode={self.mode!r})" + None + if self.auth is None + else (self.auth[0].encode("utf-8"), self.auth[1].encode("utf-8")) ) + def __repr__(self) -> str: + # The authentication is represented with the password component masked. + auth = (self.auth[0], "********") if self.auth else None + + # Build a nice concise representation. + url_str = f"{str(self.url)!r}" + auth_str = f", auth={auth!r}" if auth else "" + headers_str = f", headers={dict(self.headers)!r}" if self.headers else "" + return f"Proxy({url_str}{auth_str}{headers_str})" + DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0) DEFAULT_LIMITS = Limits(max_connections=100, max_keepalive_connections=20) diff --git a/packages/httpx/_content.py b/packages/httpx/_content.py index 86f3c7c25..b16e12d95 100644 --- a/packages/httpx/_content.py +++ b/packages/httpx/_content.py @@ -8,6 +8,8 @@ Dict, Iterable, Iterator, + Mapping, + Optional, Tuple, Union, ) @@ -15,8 +17,14 @@ from ._exceptions import StreamClosed, StreamConsumed from ._multipart import MultipartStream -from ._transports.base import AsyncByteStream, SyncByteStream -from ._types import RequestContent, RequestData, RequestFiles, ResponseContent +from ._types import ( + AsyncByteStream, + RequestContent, + RequestData, + RequestFiles, + ResponseContent, + SyncByteStream, +) from ._utils import peek_filelike_length, primitive_value_to_str @@ -32,6 +40,8 @@ async def __aiter__(self) -> AsyncIterator[bytes]: class IteratorByteStream(SyncByteStream): + CHUNK_SIZE = 65_536 + def __init__(self, stream: Iterable[bytes]): self._stream = stream self._is_stream_consumed = False @@ -42,11 +52,21 @@ def __iter__(self) -> Iterator[bytes]: raise StreamConsumed() self._is_stream_consumed = True - for part in self._stream: - yield part + if hasattr(self._stream, "read"): + # File-like interfaces should use 'read' directly. + chunk = self._stream.read(self.CHUNK_SIZE) + while chunk: + yield chunk + chunk = self._stream.read(self.CHUNK_SIZE) + else: + # Otherwise iterate. + for part in self._stream: + yield part class AsyncIteratorByteStream(AsyncByteStream): + CHUNK_SIZE = 65_536 + def __init__(self, stream: AsyncIterable[bytes]): self._stream = stream self._is_stream_consumed = False @@ -57,8 +77,16 @@ async def __aiter__(self) -> AsyncIterator[bytes]: raise StreamConsumed() self._is_stream_consumed = True - async for part in self._stream: - yield part + if hasattr(self._stream, "aread"): + # File-like interfaces should use 'aread' directly. + chunk = await self._stream.aread(self.CHUNK_SIZE) + while chunk: + yield chunk + chunk = await self._stream.aread(self.CHUNK_SIZE) + else: + # Otherwise iterate. 
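The `IteratorByteStream` change above means file-like objects passed as request content are now consumed in fixed 64 KiB chunks via `.read()`, rather than iterated (which, for files, would have meant line-by-line reads). A sketch of the affected call pattern, with a hypothetical local file:

```python
import httpx

with open("payload.bin", "rb") as f:  # hypothetical upload source
    # The stream layer detects the .read() interface and chunks accordingly;
    # Content-Length is still inferred when the file length can be peeked.
    response = httpx.post("https://www.example.org/upload", content=f)
```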
+ async for part in self._stream: + yield part class UnattachedStream(AsyncByteStream, SyncByteStream): @@ -73,20 +101,23 @@ def __iter__(self) -> Iterator[bytes]: async def __aiter__(self) -> AsyncIterator[bytes]: raise StreamClosed() - yield b"" # pragma: nocover + yield b"" # pragma: no cover def encode_content( content: Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] ) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: - if isinstance(content, (bytes, str)): body = content.encode("utf-8") if isinstance(content, str) else content content_length = len(body) headers = {"Content-Length": str(content_length)} if body else {} return headers, ByteStream(body) - elif isinstance(content, Iterable): + elif isinstance(content, Iterable) and not isinstance(content, dict): + # `not isinstance(content, dict)` is a bit oddly specific, but it + # catches a case that's easy for users to make in error, and would + # otherwise pass through here, like any other bytes-iterable, + # because `dict` happens to be iterable. See issue #2491. content_length_or_none = peek_filelike_length(content) if content_length_or_none is None: @@ -103,7 +134,7 @@ def encode_content( def encode_urlencoded_data( - data: dict, + data: RequestData, ) -> Tuple[Dict[str, str], ByteStream]: plain_data = [] for key, value in data.items(): @@ -119,7 +150,7 @@ def encode_urlencoded_data( def encode_multipart_data( - data: dict, files: RequestFiles, boundary: bytes = None + data: RequestData, files: RequestFiles, boundary: Optional[bytes] ) -> Tuple[Dict[str, str], MultipartStream]: multipart = MultipartStream(data=data, files=files, boundary=boundary) headers = multipart.get_headers() @@ -151,18 +182,18 @@ def encode_json(json: Any) -> Tuple[Dict[str, str], ByteStream]: def encode_request( - content: RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: Any = None, - boundary: bytes = None, + content: Optional[RequestContent] = None, + data: Optional[RequestData] = None, + files: Optional[RequestFiles] = None, + json: Optional[Any] = None, + boundary: Optional[bytes] = None, ) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: """ Handles encoding the given `content`, `data`, `files`, and `json`, returning a two-tuple of (, ). """ - if data is not None and not isinstance(data, dict): - # We prefer to seperate `content=` + if data is not None and not isinstance(data, Mapping): + # We prefer to separate `content=` # for raw request content, and `data=
` for url encoded or # multipart form content. # @@ -186,10 +217,10 @@ def encode_request( def encode_response( - content: ResponseContent = None, - text: str = None, - html: str = None, - json: Any = None, + content: Optional[ResponseContent] = None, + text: Optional[str] = None, + html: Optional[str] = None, + json: Optional[Any] = None, ) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: """ Handles encoding the given `content`, returning a two-tuple of diff --git a/packages/httpx/_decoders.py b/packages/httpx/_decoders.py index 2230b77a9..500ce7ffc 100644 --- a/packages/httpx/_decoders.py +++ b/packages/httpx/_decoders.py @@ -8,20 +8,16 @@ import typing import zlib +from ._compat import brotli from ._exceptions import DecodingError -try: - import brotlicffi -except ImportError: # pragma: nocover - brotlicffi = None - class ContentDecoder: def decode(self, data: bytes) -> bytes: - raise NotImplementedError() # pragma: nocover + raise NotImplementedError() # pragma: no cover def flush(self) -> bytes: - raise NotImplementedError() # pragma: nocover + raise NotImplementedError() # pragma: no cover class IdentityDecoder(ContentDecoder): @@ -61,7 +57,7 @@ def decode(self, data: bytes) -> bytes: def flush(self) -> bytes: try: return self.decompressor.flush() - except zlib.error as exc: # pragma: nocover + except zlib.error as exc: # pragma: no cover raise DecodingError(str(exc)) from exc @@ -84,7 +80,7 @@ def decode(self, data: bytes) -> bytes: def flush(self) -> bytes: try: return self.decompressor.flush() - except zlib.error as exc: # pragma: nocover + except zlib.error as exc: # pragma: no cover raise DecodingError(str(exc)) from exc @@ -99,27 +95,30 @@ class BrotliDecoder(ContentDecoder): """ def __init__(self) -> None: - if brotlicffi is None: # pragma: nocover + if brotli is None: # pragma: no cover raise ImportError( - "Using 'BrotliDecoder', but the 'brotlicffi' library " - "is not installed." + "Using 'BrotliDecoder', but neither of the 'brotlicffi' or 'brotli' " + "packages have been installed. " "Make sure to install httpx using `pip install httpx[brotli]`." ) from None - self.decompressor = brotlicffi.Decompressor() + self.decompressor = brotli.Decompressor() self.seen_data = False + self._decompress: typing.Callable[[bytes], bytes] if hasattr(self.decompressor, "decompress"): - self._decompress = self.decompressor.decompress + # The 'brotlicffi' package. + self._decompress = self.decompressor.decompress # pragma: no cover else: - self._decompress = self.decompressor.process # pragma: nocover + # The 'brotli' package. + self._decompress = self.decompressor.process # pragma: no cover def decode(self, data: bytes) -> bytes: if not data: return b"" self.seen_data = True try: - return self.decompressor.decompress(data) - except brotlicffi.Error as exc: + return self._decompress(data) + except brotli.error as exc: raise DecodingError(str(exc)) from exc def flush(self) -> bytes: @@ -127,9 +126,14 @@ def flush(self) -> bytes: return b"" try: if hasattr(self.decompressor, "finish"): - self.decompressor.finish() + # Only available in the 'brotlicffi' package. + + # As the decompressor decompresses eagerly, this + # will never actually emit any data. However, it will potentially throw + # errors if a truncated or damaged data stream has been used. 
+ self.decompressor.finish() # pragma: no cover return b"" - except brotlicffi.Error as exc: # pragma: nocover + except brotli.error as exc: # pragma: no cover raise DecodingError(str(exc)) from exc @@ -163,13 +167,13 @@ class ByteChunker: Handles returning byte content in fixed-size chunks. """ - def __init__(self, chunk_size: int = None) -> None: + def __init__(self, chunk_size: typing.Optional[int] = None) -> None: self._buffer = io.BytesIO() self._chunk_size = chunk_size def decode(self, content: bytes) -> typing.List[bytes]: if self._chunk_size is None: - return [content] + return [content] if content else [] self._buffer.write(content) if self._buffer.tell() >= self._chunk_size: @@ -202,7 +206,7 @@ class TextChunker: Handles returning text content in fixed-size chunks. """ - def __init__(self, chunk_size: int = None) -> None: + def __init__(self, chunk_size: typing.Optional[int] = None) -> None: self._buffer = io.StringIO() self._chunk_size = chunk_size @@ -241,52 +245,13 @@ class TextDecoder: Handles incrementally decoding bytes into text """ - def __init__(self, encoding: typing.Optional[str] = None): - self.decoder: typing.Optional[codecs.IncrementalDecoder] = None - if encoding is not None: - self.decoder = codecs.getincrementaldecoder(encoding)(errors="strict") + def __init__(self, encoding: str = "utf-8"): + self.decoder = codecs.getincrementaldecoder(encoding)(errors="replace") def decode(self, data: bytes) -> str: - """ - If an encoding is explicitly specified, then we use that. - Otherwise our strategy is to attempt UTF-8, and fallback to Windows 1252. - - Note that UTF-8 is a strict superset of ascii, and Windows 1252 is a - superset of the non-control characters in iso-8859-1, so we essentially - end up supporting any of ascii, utf-8, iso-8859-1, cp1252. - - Given that UTF-8 is now by *far* the most widely used encoding, this - should be a pretty robust strategy for cases where a charset has - not been explicitly included. - - Useful stats on the prevalence of different charsets in the wild... - - * https://w3techs.com/technologies/overview/character_encoding - * https://w3techs.com/technologies/history_overview/character_encoding - - The HTML5 spec also has some useful guidelines, suggesting defaults of - either UTF-8 or Windows 1252 in most cases... - - * https://dev.w3.org/html5/spec-LC/Overview.html - """ - if self.decoder is None: - # If this is the first decode pass then we need to determine which - # encoding to use by attempting UTF-8 and raising any decode errors. - attempt_utf_8 = codecs.getincrementaldecoder("utf-8")(errors="strict") - try: - attempt_utf_8.decode(data) - except UnicodeDecodeError: - # Could not decode as UTF-8. Use Windows 1252. - self.decoder = codecs.getincrementaldecoder("cp1252")(errors="replace") - else: - # Can decode as UTF-8. Use UTF-8 with lenient error settings. - self.decoder = codecs.getincrementaldecoder("utf-8")(errors="replace") - return self.decoder.decode(data) def flush(self) -> str: - if self.decoder is None: - return "" return self.decoder.decode(b"", True) @@ -294,66 +259,56 @@ class LineDecoder: """ Handles incrementally reading lines from text. - Uses universal line decoding, supporting any of `\n`, `\r`, or `\r\n` - as line endings, normalizing to `\n`. + Has the same behaviour as the stdllib splitlines, but handling the input iteratively. 
""" def __init__(self) -> None: - self.buffer = "" + self.buffer: typing.List[str] = [] + self.trailing_cr: bool = False def decode(self, text: str) -> typing.List[str]: - lines = [] - - if text and self.buffer and self.buffer[-1] == "\r": - if text.startswith("\n"): - # Handle the case where we have an "\r\n" split across - # our previous input, and our new chunk. - lines.append(self.buffer[:-1] + "\n") - self.buffer = "" - text = text[1:] - else: - # Handle the case where we have "\r" at the end of our - # previous input. - lines.append(self.buffer[:-1] + "\n") - self.buffer = "" - - while text: - num_chars = len(text) - for idx in range(num_chars): - char = text[idx] - next_char = None if idx + 1 == num_chars else text[idx + 1] - if char == "\n": - lines.append(self.buffer + text[: idx + 1]) - self.buffer = "" - text = text[idx + 1 :] - break - elif char == "\r" and next_char == "\n": - lines.append(self.buffer + text[:idx] + "\n") - self.buffer = "" - text = text[idx + 2 :] - break - elif char == "\r" and next_char is not None: - lines.append(self.buffer + text[:idx] + "\n") - self.buffer = "" - text = text[idx + 1 :] - break - elif next_char is None: - self.buffer += text - text = "" - break + # See https://docs.python.org/3/library/stdtypes.html#str.splitlines + NEWLINE_CHARS = "\n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029" + + # We always push a trailing `\r` into the next decode iteration. + if self.trailing_cr: + text = "\r" + text + self.trailing_cr = False + if text.endswith("\r"): + self.trailing_cr = True + text = text[:-1] + + if not text: + return [] + + trailing_newline = text[-1] in NEWLINE_CHARS + lines = text.splitlines() + + if len(lines) == 1 and not trailing_newline: + # No new lines, buffer the input and continue. + self.buffer.append(lines[0]) + return [] + + if self.buffer: + # Include any existing buffer in the first portion of the + # splitlines result. + lines = ["".join(self.buffer) + lines[0]] + lines[1:] + self.buffer = [] + + if not trailing_newline: + # If the last segment of splitlines is not newline terminated, + # then drop it from our output and start a new buffer. + self.buffer = [lines.pop()] return lines def flush(self) -> typing.List[str]: - if self.buffer.endswith("\r"): - # Handle the case where we had a trailing '\r', which could have - # been a '\r\n' pair. 
- lines = [self.buffer[:-1] + "\n"] - elif self.buffer: - lines = [self.buffer] - else: - lines = [] - self.buffer = "" + if not self.buffer and not self.trailing_cr: + return [] + + lines = ["".join(self.buffer)] + self.buffer = [] + self.trailing_cr = False return lines @@ -365,5 +320,5 @@ def flush(self) -> typing.List[str]: } -if brotlicffi is None: - SUPPORTED_DECODERS.pop("br") # pragma: nocover +if brotli is None: + SUPPORTED_DECODERS.pop("br") # pragma: no cover diff --git a/packages/httpx/_exceptions.py b/packages/httpx/_exceptions.py index b6e59aa05..24a4f8aba 100644 --- a/packages/httpx/_exceptions.py +++ b/packages/httpx/_exceptions.py @@ -21,7 +21,6 @@ - UnsupportedProtocol + DecodingError + TooManyRedirects - + RequestBodyUnavailable x HTTPStatusError * InvalidURL * CookieConflict @@ -35,7 +34,7 @@ import typing if typing.TYPE_CHECKING: - from ._models import Request, Response # pragma: nocover + from ._models import Request, Response # pragma: no cover class HTTPError(Exception): @@ -58,6 +57,17 @@ class HTTPError(Exception): def __init__(self, message: str) -> None: super().__init__(message) + self._request: typing.Optional["Request"] = None + + @property + def request(self) -> "Request": + if self._request is None: + raise RuntimeError("The .request property has not been set.") + return self._request + + @request.setter + def request(self, request: "Request") -> None: + self._request = request class RequestError(HTTPError): @@ -65,7 +75,9 @@ class RequestError(HTTPError): Base class for all exceptions that may occur when issuing a `.request()`. """ - def __init__(self, message: str, *, request: "Request" = None) -> None: + def __init__( + self, message: str, *, request: typing.Optional["Request"] = None + ) -> None: super().__init__(message) # At the point an exception is raised we won't typically have a request # instance to associate it with. @@ -75,16 +87,6 @@ def __init__(self, message: str, *, request: "Request" = None) -> None: # have a `.request` property set on them. self._request = request - @property - def request(self) -> "Request": - if self._request is None: - raise RuntimeError("The .request property has not been set.") - return self._request - - @request.setter - def request(self, request: "Request") -> None: - self._request = request - class TransportError(RequestError): """ @@ -199,7 +201,7 @@ class RemoteProtocolError(ProtocolError): """ The protocol was violated by the server. - For exaample, returning malformed HTTP. + For example, returning malformed HTTP. """ @@ -326,7 +328,9 @@ def __init__(self) -> None: @contextlib.contextmanager -def request_context(request: "Request" = None) -> typing.Iterator[None]: +def request_context( + request: typing.Optional["Request"] = None, +) -> typing.Iterator[None]: """ A context manager that can be used to attach the given request context to any `RequestError` exceptions that are raised within the block. 
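With the `_exceptions.py` change above, the `.request` property moves from `RequestError` up to the base `HTTPError`, so transport failures and `raise_for_status()` errors can be reported through one handler:

```python
import httpx

try:
    response = httpx.get("https://www.example.org/")
    response.raise_for_status()
except httpx.HTTPError as exc:
    # Covers both RequestError and HTTPStatusError uniformly.
    print(f"Error while requesting {exc.request.url!r}")
```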
diff --git a/packages/httpx/_main.py b/packages/httpx/_main.py
new file mode 100644
index 000000000..7c12ce841
--- /dev/null
+++ b/packages/httpx/_main.py
@@ -0,0 +1,506 @@
+import functools
+import json
+import sys
+import typing
+
+import click
+import httpcore
+import pygments.lexers
+import pygments.util
+import rich.console
+import rich.markup
+import rich.progress
+import rich.syntax
+import rich.table
+
+from ._client import Client
+from ._exceptions import RequestError
+from ._models import Response
+from ._status_codes import codes
+
+
+def print_help() -> None:
+    console = rich.console.Console()
+
+    console.print("[bold]HTTPX :butterfly:", justify="center")
+    console.print()
+    console.print("A next generation HTTP client.", justify="center")
+    console.print()
+    console.print(
+        "Usage: [bold]httpx[/bold] [cyan]<URL> [OPTIONS][/cyan] ", justify="left"
+    )
+    console.print()
+
+    table = rich.table.Table.grid(padding=1, pad_edge=True)
+    table.add_column("Parameter", no_wrap=True, justify="left", style="bold")
+    table.add_column("Description")
+    table.add_row(
+        "-m, --method [cyan]METHOD",
+        "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD.\n"
+        "[Default: GET, or POST if a request body is included]",
+    )
+    table.add_row(
+        "-p, --params [cyan]<NAME VALUE> ...",
+        "Query parameters to include in the request URL.",
+    )
+    table.add_row(
+        "-c, --content [cyan]TEXT", "Byte content to include in the request body."
+    )
+    table.add_row(
+        "-d, --data [cyan]<NAME VALUE> ...", "Form data to include in the request body."
+    )
+    table.add_row(
+        "-f, --files [cyan]<NAME FILENAME> ...",
+        "Form files to include in the request body.",
+    )
+    table.add_row("-j, --json [cyan]TEXT", "JSON data to include in the request body.")
+    table.add_row(
+        "-h, --headers [cyan]<NAME VALUE> ...",
+        "Include additional HTTP headers in the request.",
+    )
+    table.add_row(
+        "--cookies [cyan]<NAME VALUE> ...", "Cookies to include in the request."
+    )
+    table.add_row(
+        "--auth [cyan]<USER PASS>",
+        "Username and password to include in the request. Specify '-' for the password to use "
+        "a password prompt. Note that using --verbose/-v will expose the Authorization "
+        "header, including the password encoding in a trivially reversible format.",
+    )
+
+    table.add_row(
+        "--proxies [cyan]URL",
+        "Send the request via a proxy. Should be the URL giving the proxy address.",
+    )
+
+    table.add_row(
+        "--timeout [cyan]FLOAT",
+        "Timeout value to use for network operations, such as establishing the connection, "
+        "reading some data, etc... [Default: 5.0]",
+    )
+
+    table.add_row("--follow-redirects", "Automatically follow redirects.")
+    table.add_row("--no-verify", "Disable SSL verification.")
+    table.add_row(
+        "--http2", "Send the request using HTTP/2, if the remote server supports it."
+    )
+
+    table.add_row(
+        "--download [cyan]FILE",
+        "Save the response content as a file, rather than displaying it.",
+    )
+
+    table.add_row("-v, --verbose", "Verbose output.
Show request as well as response.") + table.add_row("--help", "Show this message and exit.") + console.print(table) + + +def get_lexer_for_response(response: Response) -> str: + content_type = response.headers.get("Content-Type") + if content_type is not None: + mime_type, _, _ = content_type.partition(";") + try: + return typing.cast( + str, pygments.lexers.get_lexer_for_mimetype(mime_type.strip()).name + ) + except pygments.util.ClassNotFound: # pragma: no cover + pass + return "" # pragma: no cover + + +def format_request_headers(request: httpcore.Request, http2: bool = False) -> str: + version = "HTTP/2" if http2 else "HTTP/1.1" + headers = [ + (name.lower() if http2 else name, value) for name, value in request.headers + ] + method = request.method.decode("ascii") + target = request.url.target.decode("ascii") + lines = [f"{method} {target} {version}"] + [ + f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers + ] + return "\n".join(lines) + + +def format_response_headers( + http_version: bytes, + status: int, + reason_phrase: typing.Optional[bytes], + headers: typing.List[typing.Tuple[bytes, bytes]], +) -> str: + version = http_version.decode("ascii") + reason = ( + codes.get_reason_phrase(status) + if reason_phrase is None + else reason_phrase.decode("ascii") + ) + lines = [f"{version} {status} {reason}"] + [ + f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers + ] + return "\n".join(lines) + + +def print_request_headers(request: httpcore.Request, http2: bool = False) -> None: + console = rich.console.Console() + http_text = format_request_headers(request, http2=http2) + syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + + +def print_response_headers( + http_version: bytes, + status: int, + reason_phrase: typing.Optional[bytes], + headers: typing.List[typing.Tuple[bytes, bytes]], +) -> None: + console = rich.console.Console() + http_text = format_response_headers(http_version, status, reason_phrase, headers) + syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + + +def print_response(response: Response) -> None: + console = rich.console.Console() + lexer_name = get_lexer_for_response(response) + if lexer_name: + if lexer_name.lower() == "json": + try: + data = response.json() + text = json.dumps(data, indent=4) + except ValueError: # pragma: no cover + text = response.text + else: + text = response.text + + syntax = rich.syntax.Syntax(text, lexer_name, theme="ansi_dark", word_wrap=True) + console.print(syntax) + else: + console.print(f"<{len(response.content)} bytes of binary data>") + + +_PCTRTT = typing.Tuple[typing.Tuple[str, str], ...] +_PCTRTTT = typing.Tuple[_PCTRTT, ...] 
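For context, the `_PCTRTT` / `_PCTRTTT` aliases here (and `_PeerCertRetDictType` just below) describe the nested tuple structure that `ssl.SSLSocket.getpeercert()` returns for fields such as "subject" and "issuer"; a sketch with illustrative values only:

    cert = {
        "subject": ((("commonName", "example.org"),),),
        "issuer": ((("organizationName", "Example CA"),),),
        "notAfter": "Jan  1 00:00:00 2030 GMT",
    }
    # format_certificate(cert) renders one "* key: value" line per entry,
    # flattening the nested subject/issuer tuples into a line per name/value pair.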
+_PeerCertRetDictType = typing.Dict[str, typing.Union[str, _PCTRTTT, _PCTRTT]] + + +def format_certificate(cert: _PeerCertRetDictType) -> str: # pragma: no cover + lines = [] + for key, value in cert.items(): + if isinstance(value, (list, tuple)): + lines.append(f"* {key}:") + for item in value: + if key in ("subject", "issuer"): + for sub_item in item: + lines.append(f"* {sub_item[0]}: {sub_item[1]!r}") + elif isinstance(item, tuple) and len(item) == 2: + lines.append(f"* {item[0]}: {item[1]!r}") + else: + lines.append(f"* {item!r}") + else: + lines.append(f"* {key}: {value!r}") + return "\n".join(lines) + + +def trace( + name: str, info: typing.Mapping[str, typing.Any], verbose: bool = False +) -> None: + console = rich.console.Console() + if name == "connection.connect_tcp.started" and verbose: + host = info["host"] + console.print(f"* Connecting to {host!r}") + elif name == "connection.connect_tcp.complete" and verbose: + stream = info["return_value"] + server_addr = stream.get_extra_info("server_addr") + console.print(f"* Connected to {server_addr[0]!r} on port {server_addr[1]}") + elif name == "connection.start_tls.complete" and verbose: # pragma: no cover + stream = info["return_value"] + ssl_object = stream.get_extra_info("ssl_object") + version = ssl_object.version() + cipher = ssl_object.cipher() + server_cert = ssl_object.getpeercert() + alpn = ssl_object.selected_alpn_protocol() + console.print(f"* SSL established using {version!r} / {cipher[0]!r}") + console.print(f"* Selected ALPN protocol: {alpn!r}") + if server_cert: + console.print("* Server certificate:") + console.print(format_certificate(server_cert)) + elif name == "http11.send_request_headers.started" and verbose: + request = info["request"] + print_request_headers(request, http2=False) + elif name == "http2.send_request_headers.started" and verbose: # pragma: no cover + request = info["request"] + print_request_headers(request, http2=True) + elif name == "http11.receive_response_headers.complete": + http_version, status, reason_phrase, headers = info["return_value"] + print_response_headers(http_version, status, reason_phrase, headers) + elif name == "http2.receive_response_headers.complete": # pragma: no cover + status, headers = info["return_value"] + http_version = b"HTTP/2" + reason_phrase = None + print_response_headers(http_version, status, reason_phrase, headers) + + +def download_response(response: Response, download: typing.BinaryIO) -> None: + console = rich.console.Console() + console.print() + content_length = response.headers.get("Content-Length") + with rich.progress.Progress( + "[progress.description]{task.description}", + "[progress.percentage]{task.percentage:>3.0f}%", + rich.progress.BarColumn(bar_width=None), + rich.progress.DownloadColumn(), + rich.progress.TransferSpeedColumn(), + ) as progress: + description = f"Downloading [bold]{rich.markup.escape(download.name)}" + download_task = progress.add_task( + description, + total=int(content_length or 0), + start=content_length is not None, + ) + for chunk in response.iter_bytes(): + download.write(chunk) + progress.update(download_task, completed=response.num_bytes_downloaded) + + +def validate_json( + ctx: click.Context, + param: typing.Union[click.Option, click.Parameter], + value: typing.Any, +) -> typing.Any: + if value is None: + return None + + try: + return json.loads(value) + except json.JSONDecodeError: # pragma: no cover + raise click.BadParameter("Not valid JSON") + + +def validate_auth( + ctx: click.Context, + param: 
typing.Union[click.Option, click.Parameter], + value: typing.Any, +) -> typing.Any: + if value == (None, None): + return None + + username, password = value + if password == "-": # pragma: no cover + password = click.prompt("Password", hide_input=True) + return (username, password) + + +def handle_help( + ctx: click.Context, + param: typing.Union[click.Option, click.Parameter], + value: typing.Any, +) -> None: + if not value or ctx.resilient_parsing: + return + + print_help() + ctx.exit() + + +@click.command(add_help_option=False) +@click.argument("url", type=str) +@click.option( + "--method", + "-m", + "method", + type=str, + help=( + "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD. " + "[Default: GET, or POST if a request body is included]" + ), +) +@click.option( + "--params", + "-p", + "params", + type=(str, str), + multiple=True, + help="Query parameters to include in the request URL.", +) +@click.option( + "--content", + "-c", + "content", + type=str, + help="Byte content to include in the request body.", +) +@click.option( + "--data", + "-d", + "data", + type=(str, str), + multiple=True, + help="Form data to include in the request body.", +) +@click.option( + "--files", + "-f", + "files", + type=(str, click.File(mode="rb")), + multiple=True, + help="Form files to include in the request body.", +) +@click.option( + "--json", + "-j", + "json", + type=str, + callback=validate_json, + help="JSON data to include in the request body.", +) +@click.option( + "--headers", + "-h", + "headers", + type=(str, str), + multiple=True, + help="Include additional HTTP headers in the request.", +) +@click.option( + "--cookies", + "cookies", + type=(str, str), + multiple=True, + help="Cookies to include in the request.", +) +@click.option( + "--auth", + "auth", + type=(str, str), + default=(None, None), + callback=validate_auth, + help=( + "Username and password to include in the request. " + "Specify '-' for the password to use a password prompt. " + "Note that using --verbose/-v will expose the Authorization header, " + "including the password encoding in a trivially reversible format." + ), +) +@click.option( + "--proxies", + "proxies", + type=str, + default=None, + help="Send the request via a proxy. Should be the URL giving the proxy address.", +) +@click.option( + "--timeout", + "timeout", + type=float, + default=5.0, + help=( + "Timeout value to use for network operations, such as establishing the " + "connection, reading some data, etc... [Default: 5.0]" + ), +) +@click.option( + "--follow-redirects", + "follow_redirects", + is_flag=True, + default=False, + help="Automatically follow redirects.", +) +@click.option( + "--no-verify", + "verify", + is_flag=True, + default=True, + help="Disable SSL verification.", +) +@click.option( + "--http2", + "http2", + type=bool, + is_flag=True, + default=False, + help="Send the request using HTTP/2, if the remote server supports it.", +) +@click.option( + "--download", + type=click.File("wb"), + help="Save the response content as a file, rather than displaying it.", +) +@click.option( + "--verbose", + "-v", + type=bool, + is_flag=True, + default=False, + help="Verbose. 
Show request as well as response.", +) +@click.option( + "--help", + is_flag=True, + is_eager=True, + expose_value=False, + callback=handle_help, + help="Show this message and exit.", +) +def main( + url: str, + method: str, + params: typing.List[typing.Tuple[str, str]], + content: str, + data: typing.List[typing.Tuple[str, str]], + files: typing.List[typing.Tuple[str, click.File]], + json: str, + headers: typing.List[typing.Tuple[str, str]], + cookies: typing.List[typing.Tuple[str, str]], + auth: typing.Optional[typing.Tuple[str, str]], + proxies: str, + timeout: float, + follow_redirects: bool, + verify: bool, + http2: bool, + download: typing.Optional[typing.BinaryIO], + verbose: bool, +) -> None: + """ + An HTTP command line client. + Sends a request and displays the response. + """ + if not method: + method = "POST" if content or data or files or json else "GET" + + try: + with Client( + proxies=proxies, + timeout=timeout, + verify=verify, + http2=http2, + ) as client: + with client.stream( + method, + url, + params=list(params), + content=content, + data=dict(data), + files=files, # type: ignore + json=json, + headers=headers, + cookies=dict(cookies), + auth=auth, + follow_redirects=follow_redirects, + extensions={"trace": functools.partial(trace, verbose=verbose)}, + ) as response: + if download is not None: + download_response(response, download) + else: + response.read() + if response.content: + print_response(response) + + except RequestError as exc: + console = rich.console.Console() + console.print(f"[red]{type(exc).__name__}[/red]: {exc}") + sys.exit(1) + + sys.exit(0 if response.is_success else 1) diff --git a/packages/httpx/_models.py b/packages/httpx/_models.py index 06ebb92c4..e1e45cf06 100644 --- a/packages/httpx/_models.py +++ b/packages/httpx/_models.py @@ -1,16 +1,10 @@ -import cgi import datetime import email.message import json as jsonlib import typing import urllib.request -from collections.abc import MutableMapping +from collections.abc import Mapping from http.cookiejar import Cookie, CookieJar -from urllib.parse import parse_qs, quote, unquote, urlencode - -import idna -import rfc3986 -import rfc3986.exceptions from ._content import ByteStream, UnattachedStream, encode_request, encode_response from ._decoders import ( @@ -26,812 +20,54 @@ from ._exceptions import ( CookieConflict, HTTPStatusError, - InvalidURL, RequestNotRead, ResponseNotRead, StreamClosed, StreamConsumed, request_context, ) +from ._multipart import get_multipart_boundary_from_content_type from ._status_codes import codes -from ._transports.base import AsyncByteStream, SyncByteStream from ._types import ( + AsyncByteStream, CookieTypes, HeaderTypes, - PrimitiveData, QueryParamTypes, - RawURL, RequestContent, RequestData, + RequestExtensions, RequestFiles, ResponseContent, - URLTypes, + ResponseExtensions, + SyncByteStream, ) +from ._urls import URL from ._utils import ( guess_json_utf, is_known_encoding, normalize_header_key, normalize_header_value, obfuscate_sensitive_headers, + parse_content_type_charset, parse_header_links, - primitive_value_to_str, ) -class URL: - """ - url = httpx.URL("HTTPS://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink") - - assert url.scheme == "https" - assert url.username == "jo@email.com" - assert url.password == "a secret" - assert url.userinfo == b"jo%40email.com:a%20secret" - assert url.host == "müller.de" - assert url.raw_host == b"xn--mller-kva.de" - assert url.port == 1234 - assert url.netloc == b"xn--mller-kva.de:1234" - assert url.path 
== "/pa th" - assert url.query == b"?search=ab" - assert url.raw_path == b"/pa%20th?search=ab" - assert url.fragment == "anchorlink" - - The components of a URL are broken down like this: - - https://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink - [scheme] [ username ] [password] [ host ][port][ path ] [ query ] [fragment] - [ userinfo ] [ netloc ][ raw_path ] - - Note that: - - * `url.scheme` is normalized to always be lowercased. - - * `url.host` is normalized to always be lowercased. Internationalized domain - names are represented in unicode, without IDNA encoding applied. For instance: - - url = httpx.URL("http://中国.icom.museum") - assert url.host == "中国.icom.museum" - url = httpx.URL("http://xn--fiqs8s.icom.museum") - assert url.host == "中国.icom.museum" - - * `url.raw_host` is normalized to always be lowercased, and is IDNA encoded. - - url = httpx.URL("http://中国.icom.museum") - assert url.raw_host == b"xn--fiqs8s.icom.museum" - url = httpx.URL("http://xn--fiqs8s.icom.museum") - assert url.raw_host == b"xn--fiqs8s.icom.museum" - - * `url.port` is either None or an integer. URLs that include the default port for - "http", "https", "ws", "wss", and "ftp" schemes have their port normalized to `None`. - - assert httpx.URL("http://example.com") == httpx.URL("http://example.com:80") - assert httpx.URL("http://example.com").port is None - assert httpx.URL("http://example.com:80").port is None - - * `url.userinfo` is raw bytes, without URL escaping. Usually you'll want to work with - `url.username` and `url.password` instead, which handle the URL escaping. - - * `url.raw_path` is raw bytes of both the path and query, without URL escaping. - This portion is used as the target when constructing HTTP requests. Usually you'll - want to work with `url.path` instead. - - * `url.query` is raw bytes, without URL escaping. A URL query string portion can only - be properly URL escaped when decoding the parameter names and values themselves. - """ - - def __init__( - self, url: typing.Union["URL", str, RawURL] = "", **kwargs: typing.Any - ) -> None: - if isinstance(url, (str, tuple)): - if isinstance(url, tuple): - raw_scheme, raw_host, port, raw_path = url - scheme = raw_scheme.decode("ascii") - host = raw_host.decode("ascii") - if host and ":" in host and host[0] != "[": - # it's an IPv6 address, so it should be enclosed in "[" and "]" - # ref: https://tools.ietf.org/html/rfc2732#section-2 - # ref: https://tools.ietf.org/html/rfc3986#section-3.2.2 - host = f"[{host}]" - port_str = "" if port is None else f":{port}" - path = raw_path.decode("ascii") - url = f"{scheme}://{host}{port_str}{path}" - - try: - self._uri_reference = rfc3986.iri_reference(url).encode() - except rfc3986.exceptions.InvalidAuthority as exc: - raise InvalidURL(message=str(exc)) from None - - if self.is_absolute_url: - # We don't want to normalize relative URLs, since doing so - # removes any leading `../` portion. - self._uri_reference = self._uri_reference.normalize() - elif isinstance(url, URL): - self._uri_reference = url._uri_reference - else: - raise TypeError( - f"Invalid type for url. Expected str or httpx.URL, got {type(url)}: {url!r}" - ) - - # Perform port normalization, following the WHATWG spec for default ports. 
- # - # See: - # * https://tools.ietf.org/html/rfc3986#section-3.2.3 - # * https://url.spec.whatwg.org/#url-miscellaneous - # * https://url.spec.whatwg.org/#scheme-state - default_port = { - "ftp": ":21", - "http": ":80", - "https": ":443", - "ws": ":80", - "wss": ":443", - }.get(self._uri_reference.scheme, "") - authority = self._uri_reference.authority or "" - if default_port and authority.endswith(default_port): - authority = authority[: -len(default_port)] - self._uri_reference = self._uri_reference.copy_with(authority=authority) - - if kwargs: - self._uri_reference = self.copy_with(**kwargs)._uri_reference - - @property - def scheme(self) -> str: - """ - The URL scheme, such as "http", "https". - Always normalised to lowercase. - """ - return self._uri_reference.scheme or "" - - @property - def raw_scheme(self) -> bytes: - """ - The raw bytes representation of the URL scheme, such as b"http", b"https". - Always normalised to lowercase. - """ - return self.scheme.encode("ascii") - - @property - def userinfo(self) -> bytes: - """ - The URL userinfo as a raw bytestring. - For example: b"jo%40email.com:a%20secret". - """ - userinfo = self._uri_reference.userinfo or "" - return userinfo.encode("ascii") - - @property - def username(self) -> str: - """ - The URL username as a string, with URL decoding applied. - For example: "jo@email.com" - """ - userinfo = self._uri_reference.userinfo or "" - return unquote(userinfo.partition(":")[0]) - - @property - def password(self) -> str: - """ - The URL password as a string, with URL decoding applied. - For example: "a secret" - """ - userinfo = self._uri_reference.userinfo or "" - return unquote(userinfo.partition(":")[2]) - - @property - def host(self) -> str: - """ - The URL host as a string. - Always normalized to lowercase, with IDNA hosts decoded into unicode. - - Examples: - - url = httpx.URL("http://www.EXAMPLE.org") - assert url.host == "www.example.org" - - url = httpx.URL("http://中国.icom.museum") - assert url.host == "中国.icom.museum" - - url = httpx.URL("http://xn--fiqs8s.icom.museum") - assert url.host == "中国.icom.museum" - - url = httpx.URL("https://[::ffff:192.168.0.1]") - assert url.host == "::ffff:192.168.0.1" - """ - host: str = self._uri_reference.host or "" - - if host and ":" in host and host[0] == "[": - # it's an IPv6 address - host = host.lstrip("[").rstrip("]") - - if host.startswith("xn--"): - host = idna.decode(host) - - return host - - @property - def raw_host(self) -> bytes: - """ - The raw bytes representation of the URL host. - Always normalized to lowercase, and IDNA encoded. - - Examples: - - url = httpx.URL("http://www.EXAMPLE.org") - assert url.raw_host == b"www.example.org" - - url = httpx.URL("http://中国.icom.museum") - assert url.raw_host == b"xn--fiqs8s.icom.museum" - - url = httpx.URL("http://xn--fiqs8s.icom.museum") - assert url.raw_host == b"xn--fiqs8s.icom.museum" - - url = httpx.URL("https://[::ffff:192.168.0.1]") - assert url.raw_host == b"::ffff:192.168.0.1" - """ - host: str = self._uri_reference.host or "" - - if host and ":" in host and host[0] == "[": - # it's an IPv6 address - host = host.lstrip("[").rstrip("]") - - return host.encode("ascii") - - @property - def port(self) -> typing.Optional[int]: - """ - The URL port as an integer. - - Note that the URL class performs port normalization as per the WHATWG spec. - Default ports for "http", "https", "ws", "wss", and "ftp" schemes are always - treated as `None`. 
-
-        For example:
-
-        assert httpx.URL("http://www.example.com") == httpx.URL("http://www.example.com:80")
-        assert httpx.URL("http://www.example.com:80").port is None
-        """
-        port = self._uri_reference.port
-        return int(port) if port else None
-
-    @property
-    def netloc(self) -> bytes:
-        """
-        Either `<host>` or `<host>:<port>` as bytes.
-        Always normalized to lowercase, and IDNA encoded.
-
-        This property may be used for generating the value of a request
-        "Host" header.
-        """
-        host = self._uri_reference.host or ""
-        port = self._uri_reference.port
-        netloc = host.encode("ascii")
-        if port:
-            netloc = netloc + b":" + port.encode("ascii")
-        return netloc
-
-    @property
-    def path(self) -> str:
-        """
-        The URL path as a string. Excluding the query string, and URL decoded.
-
-        For example:
-
-        url = httpx.URL("https://example.com/pa%20th")
-        assert url.path == "/pa th"
-        """
-        path = self._uri_reference.path or "/"
-        return unquote(path)
-
-    @property
-    def query(self) -> bytes:
-        """
-        The URL query string, as raw bytes, excluding the leading b"?".
-
-        This is neccessarily a bytewise interface, because we cannot
-        perform URL decoding of this representation until we've parsed
-        the keys and values into a QueryParams instance.
-
-        For example:
-
-        url = httpx.URL("https://example.com/?filter=some%20search%20terms")
-        assert url.query == b"filter=some%20search%20terms"
-        """
-        query = self._uri_reference.query or ""
-        return query.encode("ascii")
-
-    @property
-    def params(self) -> "QueryParams":
-        """
-        The URL query parameters, neatly parsed and packaged into an immutable
-        multidict representation.
-        """
-        return QueryParams(self._uri_reference.query)
-
-    @property
-    def raw_path(self) -> bytes:
-        """
-        The complete URL path and query string as raw bytes.
-        Used as the target when constructing HTTP requests.
-
-        For example:
-
-        GET /users?search=some%20text HTTP/1.1
-        Host: www.example.org
-        Connection: close
-        """
-        path = self._uri_reference.path or "/"
-        if self._uri_reference.query is not None:
-            path += "?" + self._uri_reference.query
-        return path.encode("ascii")
-
-    @property
-    def fragment(self) -> str:
-        """
-        The URL fragments, as used in HTML anchors.
-        As a string, without the leading '#'.
-        """
-        return unquote(self._uri_reference.fragment or "")
-
-    @property
-    def raw(self) -> RawURL:
-        """
-        The URL in the raw representation used by the low level
-        transport API. See `BaseTransport.handle_request`.
-
-        Provides the (scheme, host, port, target) for the outgoing request.
-        """
-        return (
-            self.raw_scheme,
-            self.raw_host,
-            self.port,
-            self.raw_path,
-        )
-
-    @property
-    def is_absolute_url(self) -> bool:
-        """
-        Return `True` for absolute URLs such as 'http://example.com/path',
-        and `False` for relative URLs such as '/path'.
-        """
-        # We don't use `.is_absolute` from `rfc3986` because it treats
-        # URLs with a fragment portion as not absolute.
-        # What we actually care about is if the URL provides
-        # a scheme and hostname to which connections should be made.
-        return bool(self._uri_reference.scheme and self._uri_reference.host)
-
-    @property
-    def is_relative_url(self) -> bool:
-        """
-        Return `False` for absolute URLs such as 'http://example.com/path',
-        and `True` for relative URLs such as '/path'.
-        """
-        return not self.is_absolute_url
-
-    def copy_with(self, **kwargs: typing.Any) -> "URL":
-        """
-        Copy this URL, returning a new URL with some components altered.
-        Accepts the same set of parameters as the components that are made
-        available via properties on the `URL` class.
- - For example: - - url = httpx.URL("https://www.example.com").copy_with(username="jo@gmail.com", password="a secret") - assert url == "https://jo%40email.com:a%20secret@www.example.com" - """ - allowed = { - "scheme": str, - "username": str, - "password": str, - "userinfo": bytes, - "host": str, - "port": int, - "netloc": bytes, - "path": str, - "query": bytes, - "raw_path": bytes, - "fragment": str, - "params": object, - } - - # Step 1 - # ====== - # - # Perform type checking for all supported keyword arguments. - for key, value in kwargs.items(): - if key not in allowed: - message = f"{key!r} is an invalid keyword argument for copy_with()" - raise TypeError(message) - if value is not None and not isinstance(value, allowed[key]): - expected = allowed[key].__name__ - seen = type(value).__name__ - message = f"Argument {key!r} must be {expected} but got {seen}" - raise TypeError(message) - - # Step 2 - # ====== - # - # Consolidate "username", "password", "userinfo", "host", "port" and "netloc" - # into a single "authority" keyword, for `rfc3986`. - if "username" in kwargs or "password" in kwargs: - # Consolidate "username" and "password" into "userinfo". - username = quote(kwargs.pop("username", self.username) or "") - password = quote(kwargs.pop("password", self.password) or "") - userinfo = f"{username}:{password}" if password else username - kwargs["userinfo"] = userinfo.encode("ascii") - - if "host" in kwargs or "port" in kwargs: - # Consolidate "host" and "port" into "netloc". - host = kwargs.pop("host", self.host) or "" - port = kwargs.pop("port", self.port) - - if host and ":" in host and host[0] != "[": - # IPv6 addresses need to be escaped within sqaure brackets. - host = f"[{host}]" - - kwargs["netloc"] = ( - f"{host}:{port}".encode("ascii") - if port is not None - else host.encode("ascii") - ) - - if "userinfo" in kwargs or "netloc" in kwargs: - # Consolidate "userinfo" and "netloc" into authority. - userinfo = (kwargs.pop("userinfo", self.userinfo) or b"").decode("ascii") - netloc = (kwargs.pop("netloc", self.netloc) or b"").decode("ascii") - authority = f"{userinfo}@{netloc}" if userinfo else netloc - kwargs["authority"] = authority - - # Step 3 - # ====== - # - # Wrangle any "path", "query", "raw_path" and "params" keywords into - # "query" and "path" keywords for `rfc3986`. - if "raw_path" in kwargs: - # If "raw_path" is included, then split it into "path" and "query" components. - raw_path = kwargs.pop("raw_path") or b"" - path, has_query, query = raw_path.decode("ascii").partition("?") - kwargs["path"] = path - kwargs["query"] = query if has_query else None - - else: - if kwargs.get("path") is not None: - # Ensure `kwargs["path"] = ` for `rfc3986`. - kwargs["path"] = quote(kwargs["path"]) - - if kwargs.get("query") is not None: - # Ensure `kwargs["query"] = ` for `rfc3986`. - # - # Note that `.copy_with(query=None)` and `.copy_with(query=b"")` - # are subtly different. The `None` style will not include an empty - # trailing "?" character. - kwargs["query"] = kwargs["query"].decode("ascii") - - if "params" in kwargs: - # Replace any "params" keyword with the raw "query" instead. - # - # Ensure that empty params use `kwargs["query"] = None` rather - # than `kwargs["query"] = ""`, so that generated URLs do not - # include an empty trailing "?". - params = kwargs.pop("params") - kwargs["query"] = None if not params else str(QueryParams(params)) - - # Step 4 - # ====== - # - # Ensure any fragment component is quoted. 
- if kwargs.get("fragment") is not None: - kwargs["fragment"] = quote(kwargs["fragment"]) - - # Step 5 - # ====== - # - # At this point kwargs may include keys for "scheme", "authority", "path", - # "query" and "fragment". Together these constitute the entire URL. - # - # See https://tools.ietf.org/html/rfc3986#section-3 - # - # foo://example.com:8042/over/there?name=ferret#nose - # \_/ \______________/\_________/ \_________/ \__/ - # | | | | | - # scheme authority path query fragment - return URL(self._uri_reference.copy_with(**kwargs).unsplit()) - - def copy_set_param(self, key: str, value: typing.Any = None) -> "URL": - return self.copy_with(params=self.params.set(key, value)) - - def copy_add_param(self, key: str, value: typing.Any = None) -> "URL": - return self.copy_with(params=self.params.add(key, value)) - - def copy_remove_param(self, key: str) -> "URL": - return self.copy_with(params=self.params.remove(key)) - - def copy_merge_params(self, params: QueryParamTypes) -> "URL": - return self.copy_with(params=self.params.merge(params)) - - def join(self, url: URLTypes) -> "URL": - """ - Return an absolute URL, using this URL as the base. - - Eg. - - url = httpx.URL("https://www.example.com/test") - url = url.join("/new/path") - assert url == "https://www.example.com/new/path" - """ - if self.is_relative_url: - # Workaround to handle relative URLs, which otherwise raise - # rfc3986.exceptions.ResolutionError when used as an argument - # in `.resolve_with`. - return ( - self.copy_with(scheme="http", host="example.com") - .join(url) - .copy_with(scheme=None, host=None) - ) - - # We drop any fragment portion, because RFC 3986 strictly - # treats URLs with a fragment portion as not being absolute URLs. - base_uri = self._uri_reference.copy_with(fragment=None) - relative_url = URL(url) - return URL(relative_url._uri_reference.resolve_with(base_uri).unsplit()) - - def __hash__(self) -> int: - return hash(str(self)) - - def __eq__(self, other: typing.Any) -> bool: - return isinstance(other, (URL, str)) and str(self) == str(URL(other)) - - def __str__(self) -> str: - return self._uri_reference.unsplit() - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - url_str = str(self) - if self._uri_reference.userinfo: - # Mask any password component in the URL representation, to lower the - # risk of unintended leakage, such as in debug information and logging. - username = quote(self.username) - url_str = ( - rfc3986.urlparse(url_str) - .copy_with(userinfo=f"{username}:[secure]") - .unsplit() - ) - return f"{class_name}({url_str!r})" - - -class QueryParams(typing.Mapping[str, str]): - """ - URL query parameters, as a multi-dict. - """ - - def __init__(self, *args: QueryParamTypes, **kwargs: typing.Any) -> None: - assert len(args) < 2, "Too many arguments." - assert not (args and kwargs), "Cannot mix named and unnamed arguments." 
- - value = args[0] if args else kwargs - - items: typing.Sequence[typing.Tuple[str, PrimitiveData]] - if value is None or isinstance(value, (str, bytes)): - value = value.decode("ascii") if isinstance(value, bytes) else value - self._dict = parse_qs(value) - elif isinstance(value, QueryParams): - self._dict = {k: list(v) for k, v in value._dict.items()} - else: - dict_value: typing.Dict[typing.Any, typing.List[typing.Any]] = {} - if isinstance(value, (list, tuple)): - # Convert list inputs like: - # [("a", "123"), ("a", "456"), ("b", "789")] - # To a dict representation, like: - # {"a": ["123", "456"], "b": ["789"]} - for item in value: - dict_value.setdefault(item[0], []).append(item[1]) - else: - # Convert dict inputs like: - # {"a": "123", "b": ["456", "789"]} - # To dict inputs where values are always lists, like: - # {"a": ["123"], "b": ["456", "789"]} - dict_value = { - k: list(v) if isinstance(v, (list, tuple)) else [v] - for k, v in value.items() - } - - # Ensure that keys and values are neatly coerced to strings. - # We coerce values `True` and `False` to JSON-like "true" and "false" - # representations, and coerce `None` values to the empty string. - self._dict = { - str(k): [primitive_value_to_str(item) for item in v] - for k, v in dict_value.items() - } - - def keys(self) -> typing.KeysView: - """ - Return all the keys in the query params. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert list(q.keys()) == ["a", "b"] - """ - return self._dict.keys() - - def values(self) -> typing.ValuesView: - """ - Return all the values in the query params. If a key occurs more than once - only the first item for that key is returned. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert list(q.values()) == ["123", "789"] - """ - return {k: v[0] for k, v in self._dict.items()}.values() - - def items(self) -> typing.ItemsView: - """ - Return all items in the query params. If a key occurs more than once - only the first item for that key is returned. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert list(q.items()) == [("a", "123"), ("b", "789")] - """ - return {k: v[0] for k, v in self._dict.items()}.items() - - def multi_items(self) -> typing.List[typing.Tuple[str, str]]: - """ - Return all items in the query params. Allow duplicate keys to occur. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert list(q.multi_items()) == [("a", "123"), ("a", "456"), ("b", "789")] - """ - multi_items: typing.List[typing.Tuple[str, str]] = [] - for k, v in self._dict.items(): - multi_items.extend([(k, i) for i in v]) - return multi_items - - def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any: - """ - Get a value from the query param for a given key. If the key occurs - more than once, then only the first value is returned. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert q.get("a") == "123" - """ - if key in self._dict: - return self._dict[str(key)][0] - return default - - def get_list(self, key: str) -> typing.List[str]: - """ - Get all values from the query param for a given key. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert q.get_list("a") == ["123", "456"] - """ - return list(self._dict.get(str(key), [])) - - def set(self, key: str, value: typing.Any = None) -> "QueryParams": - """ - Return a new QueryParams instance, setting the value of a key. 
- - Usage: - - q = httpx.QueryParams("a=123") - q = q.set("a", "456") - assert q == httpx.QueryParams("a=456") - """ - q = QueryParams() - q._dict = dict(self._dict) - q._dict[str(key)] = [primitive_value_to_str(value)] - return q - - def add(self, key: str, value: typing.Any = None) -> "QueryParams": - """ - Return a new QueryParams instance, setting or appending the value of a key. - - Usage: - - q = httpx.QueryParams("a=123") - q = q.add("a", "456") - assert q == httpx.QueryParams("a=123&a=456") - """ - q = QueryParams() - q._dict = dict(self._dict) - q._dict[str(key)] = q.get_list(key) + [primitive_value_to_str(value)] - return q - - def remove(self, key: str) -> "QueryParams": - """ - Return a new QueryParams instance, removing the value of a key. - - Usage: - - q = httpx.QueryParams("a=123") - q = q.remove("a") - assert q == httpx.QueryParams("") - """ - q = QueryParams() - q._dict = dict(self._dict) - q._dict.pop(str(key), None) - return q - - def merge(self, params: QueryParamTypes = None) -> "QueryParams": - """ - Return a new QueryParams instance, updated with. - - Usage: - - q = httpx.QueryParams("a=123") - q = q.merge({"b": "456"}) - assert q == httpx.QueryParams("a=123&b=456") - - q = httpx.QueryParams("a=123") - q = q.merge({"a": "456", "b": "789"}) - assert q == httpx.QueryParams("a=456&b=789") - """ - q = QueryParams(params) - q._dict = {**self._dict, **q._dict} - return q - - def __getitem__(self, key: typing.Any) -> str: - return self._dict[key][0] - - def __contains__(self, key: typing.Any) -> bool: - return key in self._dict - - def __iter__(self) -> typing.Iterator[typing.Any]: - return iter(self.keys()) - - def __len__(self) -> int: - return len(self._dict) - - def __bool__(self) -> bool: - return bool(self._dict) - - def __hash__(self) -> int: - return hash(str(self)) - - def __eq__(self, other: typing.Any) -> bool: - if not isinstance(other, self.__class__): - return False - return sorted(self.multi_items()) == sorted(other.multi_items()) - - def __str__(self) -> str: - return urlencode(self.multi_items()) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - query_string = str(self) - return f"{class_name}({query_string!r})" - - def update(self, params: QueryParamTypes = None) -> None: - raise RuntimeError( - "QueryParams are immutable since 0.18.0. " - "Use `q = q.merge(...)` to create an updated copy." - ) - - def __setitem__(self, key: str, value: str) -> None: - raise RuntimeError( - "QueryParams are immutable since 0.18.0. " - "Use `q = q.set(key, value)` to create an updated copy." - ) - - class Headers(typing.MutableMapping[str, str]): """ HTTP headers, as a case-insensitive multi-dict. """ - def __init__(self, headers: HeaderTypes = None, encoding: str = None) -> None: + def __init__( + self, + headers: typing.Optional[HeaderTypes] = None, + encoding: typing.Optional[str] = None, + ) -> None: if headers is None: self._list = [] # type: typing.List[typing.Tuple[bytes, bytes, bytes]] elif isinstance(headers, Headers): self._list = list(headers._list) - elif isinstance(headers, dict): + elif isinstance(headers, Mapping): self._list = [ ( normalize_header_key(k, lower=False, encoding=encoding), @@ -905,7 +141,7 @@ def values(self) -> typing.ValuesView[str]: def items(self) -> typing.ItemsView[str, str]: """ Return `(key, value)` items of headers. Concatenate headers - into a single comma seperated value when a key occurs multiple times. + into a single comma separated value when a key occurs multiple times. 
""" values_dict: typing.Dict[str, str] = {} for _, key, value in self._list: @@ -920,8 +156,8 @@ def items(self) -> typing.ItemsView[str, str]: def multi_items(self) -> typing.List[typing.Tuple[str, str]]: """ Return a list of `(key, value)` pairs of headers. Allow multiple - occurences of the same key without concatenating into a single - comma seperated value. + occurrences of the same key without concatenating into a single + comma separated value. """ return [ (key.decode(self.encoding), value.decode(self.encoding)) @@ -930,7 +166,7 @@ def multi_items(self) -> typing.List[typing.Tuple[str, str]]: def get(self, key: str, default: typing.Any = None) -> typing.Any: """ - Return a header value. If multiple occurences of the header occur + Return a header value. If multiple occurrences of the header occur then concatenate them together with commas. """ try: @@ -941,7 +177,7 @@ def get(self, key: str, default: typing.Any = None) -> typing.Any: def get_list(self, key: str, split_commas: bool = False) -> typing.List[str]: """ Return a list of all header values for a given key. - If `split_commas=True` is passed, then any comma seperated header + If `split_commas=True` is passed, then any comma separated header values are split into multiple return strings. """ get_header_key = key.lower().encode(self.encoding) @@ -960,10 +196,12 @@ def get_list(self, key: str, split_commas: bool = False) -> typing.List[str]: split_values.extend([item.strip() for item in value.split(",")]) return split_values - def update(self, headers: HeaderTypes = None) -> None: # type: ignore + def update(self, headers: typing.Optional[HeaderTypes] = None) -> None: # type: ignore headers = Headers(headers) - for key, value in headers.raw: - self[key.decode(headers.encoding)] = value.decode(headers.encoding) + for key in headers.keys(): + if key in self: + self.pop(key) + self._list.extend(headers._list) def copy(self) -> "Headers": return Headers(self, encoding=self.encoding) @@ -977,10 +215,11 @@ def __getitem__(self, key: str) -> str: """ normalized_key = key.lower().encode(self.encoding) - items = [] - for _, header_key, header_value in self._list: - if header_key == normalized_key: - items.append(header_value.decode(self.encoding)) + items = [ + header_value.decode(self.encoding) + for _, header_key, header_value in self._list + if header_key == normalized_key + ] if items: return ", ".join(items) @@ -996,10 +235,11 @@ def __setitem__(self, key: str, value: str) -> None: set_value = value.encode(self._encoding or "utf-8") lookup_key = set_key.lower() - found_indexes = [] - for idx, (_, item_key, _) in enumerate(self._list): - if item_key == lookup_key: - found_indexes.append(idx) + found_indexes = [ + idx + for idx, (_, item_key, _) in enumerate(self._list) + if item_key == lookup_key + ] for idx in reversed(found_indexes[1:]): del self._list[idx] @@ -1016,10 +256,11 @@ def __delitem__(self, key: str) -> None: """ del_key = key.lower().encode(self.encoding) - pop_indexes = [] - for idx, (_, item_key, _) in enumerate(self._list): - if item_key.lower() == del_key: - pop_indexes.append(idx) + pop_indexes = [ + idx + for idx, (_, item_key, _) in enumerate(self._list) + if item_key.lower() == del_key + ] if not pop_indexes: raise KeyError(key) @@ -1067,30 +308,45 @@ class Request: def __init__( self, method: typing.Union[str, bytes], - url: typing.Union["URL", str, RawURL], + url: typing.Union["URL", str], *, - params: QueryParamTypes = None, - headers: HeaderTypes = None, - cookies: CookieTypes = None, - content: 
RequestContent = None, - data: RequestData = None, - files: RequestFiles = None, - json: typing.Any = None, - stream: typing.Union[SyncByteStream, AsyncByteStream] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + stream: typing.Union[SyncByteStream, AsyncByteStream, None] = None, + extensions: typing.Optional[RequestExtensions] = None, ): - if isinstance(method, bytes): - self.method = method.decode("ascii").upper() - else: - self.method = method.upper() + self.method = ( + method.decode("ascii").upper() + if isinstance(method, bytes) + else method.upper() + ) self.url = URL(url) if params is not None: self.url = self.url.copy_merge_params(params=params) self.headers = Headers(headers) + self.extensions = {} if extensions is None else extensions + if cookies: Cookies(cookies).set_cookie_header(self) if stream is None: - headers, stream = encode_request(content, data, files, json) + content_type: typing.Optional[str] = self.headers.get("content-type") + headers, stream = encode_request( + content=content, + data=data, + files=files, + json=json, + boundary=get_multipart_boundary_from_content_type( + content_type=content_type.encode(self.headers.encoding) + if content_type + else None + ), + ) self._prepare(headers) self.stream = stream # Load the request body, except for streaming content. @@ -1176,12 +432,13 @@ def __getstate__(self) -> typing.Dict[str, typing.Any]: return { name: value for name, value in self.__dict__.items() - if name not in ["stream"] + if name not in ["extensions", "stream"] } def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None: for name, value in state.items(): setattr(self, name, value) + self.extensions = {} self.stream = UnattachedStream() @@ -1190,31 +447,34 @@ def __init__( self, status_code: int, *, - headers: HeaderTypes = None, - content: ResponseContent = None, - text: str = None, - html: str = None, + headers: typing.Optional[HeaderTypes] = None, + content: typing.Optional[ResponseContent] = None, + text: typing.Optional[str] = None, + html: typing.Optional[str] = None, json: typing.Any = None, - stream: typing.Union[SyncByteStream, AsyncByteStream] = None, - request: Request = None, - extensions: dict = None, - history: typing.List["Response"] = None, + stream: typing.Union[SyncByteStream, AsyncByteStream, None] = None, + request: typing.Optional[Request] = None, + extensions: typing.Optional[ResponseExtensions] = None, + history: typing.Optional[typing.List["Response"]] = None, + default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", ): self.status_code = status_code self.headers = Headers(headers) self._request: typing.Optional[Request] = request - # When allow_redirects=False and a redirect is received, + # When follow_redirects=False and a redirect is received, # the client will set `response.next_request`. 
        self.next_request: typing.Optional[Request] = None

-        self.extensions = {} if extensions is None else extensions
+        self.extensions: ResponseExtensions = {} if extensions is None else extensions
        self.history = [] if history is None else list(history)

        self.is_closed = False
        self.is_stream_consumed = False

+        self.default_encoding = default_encoding
+
        if stream is None:
            headers, stream = encode_response(content, text, html, json)
            self._prepare(headers)
@@ -1280,19 +540,23 @@ def request(self, value: Request) -> None:
    @property
    def http_version(self) -> str:
        try:
-            return self.extensions["http_version"].decode("ascii", errors="ignore")
+            http_version: bytes = self.extensions["http_version"]
        except KeyError:
            return "HTTP/1.1"
+        else:
+            return http_version.decode("ascii", errors="ignore")

    @property
    def reason_phrase(self) -> str:
        try:
-            return self.extensions["reason_phrase"].decode("ascii", errors="ignore")
+            reason_phrase: bytes = self.extensions["reason_phrase"]
        except KeyError:
            return codes.get_reason_phrase(self.status_code)
+        else:
+            return reason_phrase.decode("ascii", errors="ignore")

    @property
-    def url(self) -> typing.Optional[URL]:
+    def url(self) -> URL:
        """
        Returns the URL for which the request was made.
        """
@@ -1311,22 +575,30 @@ def text(self) -> str:
        if not content:
            self._text = ""
        else:
-            decoder = TextDecoder(encoding=self.encoding)
+            decoder = TextDecoder(encoding=self.encoding or "utf-8")
            self._text = "".join([decoder.decode(self.content), decoder.flush()])
        return self._text

    @property
    def encoding(self) -> typing.Optional[str]:
        """
-        Return the encoding, which may have been set explicitly, or may have
-        been specified by the Content-Type header.
+        Return an encoding to use for decoding the byte content into text.
+        The priority for determining this is given by...
+
+        * `.encoding = <value>` has been set explicitly.
+        * The encoding as specified by the charset parameter in the Content-Type header.
+        * The encoding as determined by `default_encoding`, which may either be
+          a string like "utf-8" indicating the encoding to use, or may be a callable
+          which enables charset autodetection.
        """
        if not hasattr(self, "_encoding"):
            encoding = self.charset_encoding
            if encoding is None or not is_known_encoding(encoding):
-                self._encoding = None
-            else:
-                self._encoding = encoding
+                if isinstance(self.default_encoding, str):
+                    encoding = self.default_encoding
+                elif hasattr(self, "_content"):
+                    encoding = self.default_encoding(self._content)
+            self._encoding = encoding or "utf-8"
        return self._encoding

    @encoding.setter
@@ -1342,11 +614,7 @@ def charset_encoding(self) -> typing.Optional[str]:
        if content_type is None:
            return None

-        _, params = cgi.parse_header(content_type)
-        if "charset" not in params:
-            return None
-
-        return params["charset"].strip("'\"")
+        return parse_content_type_charset(content_type)

    def _get_content_decoder(self) -> ContentDecoder:
        """
@@ -1374,22 +642,79 @@
        return self._decoder

    @property
-    def is_error(self) -> bool:
-        return codes.is_error(self.status_code)
+    def is_informational(self) -> bool:
+        """
+        A property which is `True` for 1xx status codes, `False` otherwise.
+        """
+        return codes.is_informational(self.status_code)
+
+    @property
+    def is_success(self) -> bool:
+        """
+        A property which is `True` for 2xx status codes, `False` otherwise.
+ """ + return codes.is_success(self.status_code) @property def is_redirect(self) -> bool: - return codes.is_redirect(self.status_code) and "location" in self.headers + """ + A property which is `True` for 3xx status codes, `False` otherwise. + + Note that not all responses with a 3xx status code indicate a URL redirect. - def raise_for_status(self) -> None: + Use `response.has_redirect_location` to determine responses with a properly + formed URL redirection. """ - Raise the `HTTPStatusError` if one occurred. + return codes.is_redirect(self.status_code) + + @property + def is_client_error(self) -> bool: + """ + A property which is `True` for 4xx status codes, `False` otherwise. """ - message = ( - "{0.status_code} {error_type}: {0.reason_phrase} for url: {0.url}\n" - "For more information check: https://httpstatuses.com/{0.status_code}" + return codes.is_client_error(self.status_code) + + @property + def is_server_error(self) -> bool: + """ + A property which is `True` for 5xx status codes, `False` otherwise. + """ + return codes.is_server_error(self.status_code) + + @property + def is_error(self) -> bool: + """ + A property which is `True` for 4xx and 5xx status codes, `False` otherwise. + """ + return codes.is_error(self.status_code) + + @property + def has_redirect_location(self) -> bool: + """ + Returns True for 3xx responses with a properly formed URL redirection, + `False` otherwise. + """ + return ( + self.status_code + in ( + # 301 (Cacheable redirect. Method may change to GET.) + codes.MOVED_PERMANENTLY, + # 302 (Uncacheable redirect. Method may change to GET.) + codes.FOUND, + # 303 (Client should make a GET or HEAD request.) + codes.SEE_OTHER, + # 307 (Equiv. 302, but retain method) + codes.TEMPORARY_REDIRECT, + # 308 (Equiv. 301, but retain method) + codes.PERMANENT_REDIRECT, + ) + and "Location" in self.headers ) + def raise_for_status(self) -> "Response": + """ + Raise the `HTTPStatusError` if one occurred. + """ request = self._request if request is None: raise RuntimeError( @@ -1397,21 +722,37 @@ def raise_for_status(self) -> None: "instance has not been set on this response." 
) - if codes.is_client_error(self.status_code): - message = message.format(self, error_type="Client Error") - raise HTTPStatusError(message, request=request, response=self) - elif codes.is_server_error(self.status_code): - message = message.format(self, error_type="Server Error") - raise HTTPStatusError(message, request=request, response=self) + if self.is_success: + return self + + if self.has_redirect_location: + message = ( + "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n" + "Redirect location: '{0.headers[location]}'\n" + "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}" + ) + else: + message = ( + "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n" + "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}" + ) + + status_class = self.status_code // 100 + error_types = { + 1: "Informational response", + 3: "Redirect response", + 4: "Client error", + 5: "Server error", + } + error_type = error_types.get(status_class, "Invalid status code") + message = message.format(self, error_type=error_type) + raise HTTPStatusError(message, request=request, response=self) def json(self, **kwargs: typing.Any) -> typing.Any: if self.charset_encoding is None and self.content and len(self.content) > 3: encoding = guess_json_utf(self.content) if encoding is not None: - try: - return jsonlib.loads(self.content.decode(encoding), **kwargs) - except UnicodeDecodeError: - pass + return jsonlib.loads(self.content.decode(encoding), **kwargs) return jsonlib.loads(self.text, **kwargs) @property @@ -1446,13 +787,14 @@ def __getstate__(self) -> typing.Dict[str, typing.Any]: return { name: value for name, value in self.__dict__.items() - if name not in ["stream", "is_closed", "_decoder"] + if name not in ["extensions", "stream", "is_closed", "_decoder"] } def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None: for name, value in state.items(): setattr(self, name, value) self.is_closed = True + self.extensions = {} self.stream = UnattachedStream() def read(self) -> bytes: @@ -1463,14 +805,16 @@ def read(self) -> bytes: self._content = b"".join(self.iter_bytes()) return self._content - def iter_bytes(self, chunk_size: int = None) -> typing.Iterator[bytes]: + def iter_bytes( + self, chunk_size: typing.Optional[int] = None + ) -> typing.Iterator[bytes]: """ A byte-iterator over the decoded response content. This allows us to handle gzip, deflate, and brotli encoded responses. """ if hasattr(self, "_content"): chunk_size = len(self._content) if chunk_size is None else chunk_size - for i in range(0, len(self._content), chunk_size): + for i in range(0, len(self._content), max(chunk_size, 1)): yield self._content[i : i + chunk_size] else: decoder = self._get_content_decoder() @@ -1482,17 +826,19 @@ def iter_bytes(self, chunk_size: int = None) -> typing.Iterator[bytes]: yield chunk decoded = decoder.flush() for chunk in chunker.decode(decoded): - yield chunk + yield chunk # pragma: no cover for chunk in chunker.flush(): yield chunk - def iter_text(self, chunk_size: int = None) -> typing.Iterator[str]: + def iter_text( + self, chunk_size: typing.Optional[int] = None + ) -> typing.Iterator[str]: """ A str-iterator over the decoded response content that handles both gzip, deflate, etc but also detects the content's string encoding. 
""" - decoder = TextDecoder(encoding=self.encoding) + decoder = TextDecoder(encoding=self.encoding or "utf-8") chunker = TextChunker(chunk_size=chunk_size) with request_context(request=self._request): for byte_content in self.iter_bytes(): @@ -1514,7 +860,9 @@ def iter_lines(self) -> typing.Iterator[str]: for line in decoder.flush(): yield line - def iter_raw(self, chunk_size: int = None) -> typing.Iterator[bytes]: + def iter_raw( + self, chunk_size: typing.Optional[int] = None + ) -> typing.Iterator[bytes]: """ A byte-iterator over the raw response content. """ @@ -1561,14 +909,16 @@ async def aread(self) -> bytes: self._content = b"".join([part async for part in self.aiter_bytes()]) return self._content - async def aiter_bytes(self, chunk_size: int = None) -> typing.AsyncIterator[bytes]: + async def aiter_bytes( + self, chunk_size: typing.Optional[int] = None + ) -> typing.AsyncIterator[bytes]: """ A byte-iterator over the decoded response content. This allows us to handle gzip, deflate, and brotli encoded responses. """ if hasattr(self, "_content"): chunk_size = len(self._content) if chunk_size is None else chunk_size - for i in range(0, len(self._content), chunk_size): + for i in range(0, len(self._content), max(chunk_size, 1)): yield self._content[i : i + chunk_size] else: decoder = self._get_content_decoder() @@ -1580,17 +930,19 @@ async def aiter_bytes(self, chunk_size: int = None) -> typing.AsyncIterator[byte yield chunk decoded = decoder.flush() for chunk in chunker.decode(decoded): - yield chunk + yield chunk # pragma: no cover for chunk in chunker.flush(): yield chunk - async def aiter_text(self, chunk_size: int = None) -> typing.AsyncIterator[str]: + async def aiter_text( + self, chunk_size: typing.Optional[int] = None + ) -> typing.AsyncIterator[str]: """ A str-iterator over the decoded response content that handles both gzip, deflate, etc but also detects the content's string encoding. """ - decoder = TextDecoder(encoding=self.encoding) + decoder = TextDecoder(encoding=self.encoding or "utf-8") chunker = TextChunker(chunk_size=chunk_size) with request_context(request=self._request): async for byte_content in self.aiter_bytes(): @@ -1612,7 +964,9 @@ async def aiter_lines(self) -> typing.AsyncIterator[str]: for line in decoder.flush(): yield line - async def aiter_raw(self, chunk_size: int = None) -> typing.AsyncIterator[bytes]: + async def aiter_raw( + self, chunk_size: typing.Optional[int] = None + ) -> typing.AsyncIterator[bytes]: """ A byte-iterator over the raw response content. """ @@ -1652,12 +1006,12 @@ async def aclose(self) -> None: await self.stream.aclose() -class Cookies(MutableMapping): +class Cookies(typing.MutableMapping[str, str]): """ HTTP Cookies, as a mutable mapping. """ - def __init__(self, cookies: CookieTypes = None) -> None: + def __init__(self, cookies: typing.Optional[CookieTypes] = None) -> None: if cookies is None or isinstance(cookies, dict): self.jar = CookieJar() if isinstance(cookies, dict): @@ -1717,7 +1071,11 @@ def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None: self.jar.set_cookie(cookie) def get( # type: ignore - self, name: str, default: str = None, domain: str = None, path: str = None + self, + name: str, + default: typing.Optional[str] = None, + domain: typing.Optional[str] = None, + path: typing.Optional[str] = None, ) -> typing.Optional[str]: """ Get a cookie by name. 
May optionally include domain and path @@ -1737,7 +1095,12 @@ def get( # type: ignore return default return value - def delete(self, name: str, domain: str = None, path: str = None) -> None: + def delete( + self, + name: str, + domain: typing.Optional[str] = None, + path: typing.Optional[str] = None, + ) -> None: """ Delete a cookie by name. May optionally include domain and path in order to specify exactly which cookie to delete. @@ -1745,17 +1108,20 @@ def delete(self, name: str, domain: str = None, path: str = None) -> None: if domain is not None and path is not None: return self.jar.clear(domain, path, name) - remove = [] - for cookie in self.jar: - if cookie.name == name: - if domain is None or cookie.domain == domain: - if path is None or cookie.path == path: - remove.append(cookie) + remove = [ + cookie + for cookie in self.jar + if cookie.name == name + and (domain is None or cookie.domain == domain) + and (path is None or cookie.path == path) + ] for cookie in remove: self.jar.clear(cookie.domain, cookie.path, cookie.name) - def clear(self, domain: str = None, path: str = None) -> None: + def clear( + self, domain: typing.Optional[str] = None, path: typing.Optional[str] = None + ) -> None: """ Delete all cookies. Optionally include a domain and path in order to only delete a subset of all the cookies. @@ -1768,7 +1134,7 @@ def clear(self, domain: str = None, path: str = None) -> None: args.append(path) self.jar.clear(*args) - def update(self, cookies: CookieTypes = None) -> None: # type: ignore + def update(self, cookies: typing.Optional[CookieTypes] = None) -> None: # type: ignore cookies = Cookies(cookies) for cookie in cookies.jar: self.jar.set_cookie(cookie) diff --git a/packages/httpx/_multipart.py b/packages/httpx/_multipart.py index 36bae664e..446f4ad2d 100644 --- a/packages/httpx/_multipart.py +++ b/packages/httpx/_multipart.py @@ -1,10 +1,17 @@ import binascii +import io import os import typing from pathlib import Path -from ._transports.base import AsyncByteStream, SyncByteStream -from ._types import FileContent, FileTypes, RequestFiles +from ._types import ( + AsyncByteStream, + FileContent, + FileTypes, + RequestData, + RequestFiles, + SyncByteStream, +) from ._utils import ( format_form_param, guess_content_type, @@ -14,6 +21,20 @@ ) +def get_multipart_boundary_from_content_type( + content_type: typing.Optional[bytes], +) -> typing.Optional[bytes]: + if not content_type or not content_type.startswith(b"multipart/form-data"): + return None + # parse boundary according to + # https://www.rfc-editor.org/rfc/rfc2046#section-5.1.1 + if b";" in content_type: + for section in content_type.split(b";"): + if section.strip().lower().startswith(b"boundary="): + return section.strip()[len(b"boundary=") :].strip(b'"') + return None + + class DataField: """ A single form field item, within a multipart form field. @@ -65,40 +86,67 @@ class FileField: A single file field item, within a multipart form field. 
""" + CHUNK_SIZE = 64 * 1024 + def __init__(self, name: str, value: FileTypes) -> None: self.name = name fileobj: FileContent + headers: typing.Dict[str, str] = {} + content_type: typing.Optional[str] = None + + # This large tuple based API largely mirror's requests' API + # It would be good to think of better APIs for this that we could include in httpx 2.0 + # since variable length tuples (especially of 4 elements) are quite unwieldly if isinstance(value, tuple): - try: - filename, fileobj, content_type = value # type: ignore - except ValueError: + if len(value) == 2: + # neither the 3rd parameter (content_type) nor the 4th (headers) was included filename, fileobj = value # type: ignore - content_type = guess_content_type(filename) + elif len(value) == 3: + filename, fileobj, content_type = value # type: ignore + else: + # all 4 parameters included + filename, fileobj, content_type, headers = value # type: ignore else: filename = Path(str(getattr(value, "name", "upload"))).name fileobj = value + + if content_type is None: content_type = guess_content_type(filename) + has_content_type_header = any("content-type" in key.lower() for key in headers) + if content_type is not None and not has_content_type_header: + # note that unlike requests, we ignore the content_type + # provided in the 3rd tuple element if it is also included in the headers + # requests does the opposite (it overwrites the header with the 3rd tuple element) + headers["Content-Type"] = content_type + + if isinstance(fileobj, io.StringIO): + raise TypeError( + "Multipart file uploads require 'io.BytesIO', not 'io.StringIO'." + ) + if isinstance(fileobj, io.TextIOBase): + raise TypeError( + "Multipart file uploads must be opened in binary mode, not text mode." + ) + self.filename = filename self.file = fileobj - self.content_type = content_type - self._consumed = False + self.headers = headers - def get_length(self) -> int: + def get_length(self) -> typing.Optional[int]: headers = self.render_headers() if isinstance(self.file, (str, bytes)): return len(headers) + len(to_bytes(self.file)) - # Let's do our best not to read `file` into memory. file_length = peek_filelike_length(self.file) + + # If we can't determine the filesize without reading it into memory, + # then return `None` here, to indicate an unknown file length. if file_length is None: - # As a last resort, read file and cache contents for later. - assert not hasattr(self, "_data") - self._data = to_bytes(self.file.read()) - file_length = len(self._data) + return None return len(headers) + file_length @@ -111,9 +159,9 @@ def render_headers(self) -> bytes: if self.filename: filename = format_form_param("filename", self.filename) parts.extend([b"; ", filename]) - if self.content_type is not None: - content_type = self.content_type.encode() - parts.extend([b"\r\nContent-Type: ", content_type]) + for header_name, header_value in self.headers.items(): + key, val = f"\r\n{header_name}: ".encode(), header_value.encode() + parts.extend([key, val]) parts.append(b"\r\n\r\n") self._headers = b"".join(parts) @@ -124,17 +172,16 @@ def render_data(self) -> typing.Iterator[bytes]: yield to_bytes(self.file) return - if hasattr(self, "_data"): - # Already rendered. 
- yield self._data - return - - if self._consumed: # pragma: nocover - self.file.seek(0) - self._consumed = True + if hasattr(self.file, "seek"): + try: + self.file.seek(0) + except io.UnsupportedOperation: + pass - for chunk in self.file: + chunk = self.file.read(self.CHUNK_SIZE) + while chunk: yield to_bytes(chunk) + chunk = self.file.read(self.CHUNK_SIZE) def render(self) -> typing.Iterator[bytes]: yield self.render_headers() @@ -146,7 +193,12 @@ class MultipartStream(SyncByteStream, AsyncByteStream): Request content as streaming multipart encoded form data. """ - def __init__(self, data: dict, files: RequestFiles, boundary: bytes = None) -> None: + def __init__( + self, + data: RequestData, + files: RequestFiles, + boundary: typing.Optional[bytes] = None, + ) -> None: if boundary is None: boundary = binascii.hexlify(os.urandom(16)) @@ -157,10 +209,10 @@ def __init__(self, data: dict, files: RequestFiles, boundary: bytes = None) -> N self.fields = list(self._iter_fields(data, files)) def _iter_fields( - self, data: dict, files: RequestFiles + self, data: RequestData, files: RequestFiles ) -> typing.Iterator[typing.Union[FileField, DataField]]: for name, value in data.items(): - if isinstance(value, list): + if isinstance(value, (tuple, list)): for item in value: yield DataField(name=name, value=item) else: @@ -177,24 +229,34 @@ def iter_chunks(self) -> typing.Iterator[bytes]: yield b"\r\n" yield b"--%s--\r\n" % self.boundary - def iter_chunks_lengths(self) -> typing.Iterator[int]: + def get_content_length(self) -> typing.Optional[int]: + """ + Return the length of the multipart encoded content, or `None` if + any of the files have a length that cannot be determined upfront. + """ boundary_length = len(self.boundary) - # Follow closely what `.iter_chunks()` does. + length = 0 + for field in self.fields: - yield 2 + boundary_length + 2 - yield field.get_length() - yield 2 - yield 2 + boundary_length + 4 + field_length = field.get_length() + if field_length is None: + return None + + length += 2 + boundary_length + 2 # b"--{boundary}\r\n" + length += field_length + length += 2 # b"\r\n" - def get_content_length(self) -> int: - return sum(self.iter_chunks_lengths()) + length += 2 + boundary_length + 4 # b"--{boundary}--\r\n" + return length # Content stream interface. def get_headers(self) -> typing.Dict[str, str]: - content_length = str(self.get_content_length()) + content_length = self.get_content_length() content_type = self.content_type - return {"Content-Length": content_length, "Content-Type": content_type} + if content_length is None: + return {"Transfer-Encoding": "chunked", "Content-Type": content_type} + return {"Content-Length": str(content_length), "Content-Type": content_type} def __iter__(self) -> typing.Iterator[bytes]: for chunk in self.iter_chunks(): diff --git a/packages/httpx/_status_codes.py b/packages/httpx/_status_codes.py index 100aec641..671c30e1b 100644 --- a/packages/httpx/_status_codes.py +++ b/packages/httpx/_status_codes.py @@ -22,10 +22,10 @@ class codes(IntEnum): """ def __new__(cls, value: int, phrase: str = "") -> "codes": - obj = int.__new__(cls, value) # type: ignore + obj = int.__new__(cls, value) obj._value_ = value - obj.phrase = phrase # type: ignore + obj.phrase = phrase # type: ignore[attr-defined] return obj def __str__(self) -> str: @@ -39,32 +39,47 @@ def get_reason_phrase(cls, value: int) -> str: return "" @classmethod - def is_redirect(cls, value: int) -> bool: - return value in ( - # 301 (Cacheable redirect. Method may change to GET.) 
- codes.MOVED_PERMANENTLY, - # 302 (Uncacheable redirect. Method may change to GET.) - codes.FOUND, - # 303 (Client should make a GET or HEAD request.) - codes.SEE_OTHER, - # 307 (Equiv. 302, but retain method) - codes.TEMPORARY_REDIRECT, - # 308 (Equiv. 301, but retain method) - codes.PERMANENT_REDIRECT, - ) + def is_informational(cls, value: int) -> bool: + """ + Returns `True` for 1xx status codes, `False` otherwise. + """ + return 100 <= value <= 199 @classmethod - def is_error(cls, value: int) -> bool: - return 400 <= value <= 599 + def is_success(cls, value: int) -> bool: + """ + Returns `True` for 2xx status codes, `False` otherwise. + """ + return 200 <= value <= 299 + + @classmethod + def is_redirect(cls, value: int) -> bool: + """ + Returns `True` for 3xx status codes, `False` otherwise. + """ + return 300 <= value <= 399 @classmethod def is_client_error(cls, value: int) -> bool: + """ + Returns `True` for 4xx status codes, `False` otherwise. + """ return 400 <= value <= 499 @classmethod def is_server_error(cls, value: int) -> bool: + """ + Returns `True` for 5xx status codes, `False` otherwise. + """ return 500 <= value <= 599 + @classmethod + def is_error(cls, value: int) -> bool: + """ + Returns `True` for 4xx or 5xx status codes, `False` otherwise. + """ + return 400 <= value <= 599 + # informational CONTINUE = 100, "Continue" SWITCHING_PROTOCOLS = 101, "Switching Protocols" diff --git a/packages/httpx/_transports/asgi.py b/packages/httpx/_transports/asgi.py index 24c5452dc..f67f0fbd5 100644 --- a/packages/httpx/_transports/asgi.py +++ b/packages/httpx/_transports/asgi.py @@ -1,9 +1,10 @@ import typing -from urllib.parse import unquote import sniffio -from .base import AsyncBaseTransport, AsyncByteStream +from .._models import Request, Response +from .._types import AsyncByteStream +from .base import AsyncBaseTransport if typing.TYPE_CHECKING: # pragma: no cover import asyncio @@ -13,6 +14,16 @@ Event = typing.Union[asyncio.Event, trio.Event] +_Message = typing.Dict[str, typing.Any] +_Receive = typing.Callable[[], typing.Awaitable[_Message]] +_Send = typing.Callable[ + [typing.Dict[str, typing.Any]], typing.Coroutine[None, None, None] +] +_ASGIApp = typing.Callable[ + [typing.Dict[str, typing.Any], _Receive, _Send], typing.Coroutine[None, None, None] +] + + def create_event() -> "Event": if sniffio.current_async_library() == "trio": import trio @@ -67,7 +78,7 @@ class ASGITransport(AsyncBaseTransport): def __init__( self, - app: typing.Callable, + app: _ASGIApp, raise_app_exceptions: bool = True, root_path: str = "", client: typing.Tuple[str, int] = ("127.0.0.1", 123), @@ -79,34 +90,28 @@ def __init__( async def handle_async_request( self, - method: bytes, - url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes], - headers: typing.List[typing.Tuple[bytes, bytes]], - stream: AsyncByteStream, - extensions: dict, - ) -> typing.Tuple[ - int, typing.List[typing.Tuple[bytes, bytes]], AsyncByteStream, dict - ]: + request: Request, + ) -> Response: + assert isinstance(request.stream, AsyncByteStream) + # ASGI scope. 
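+        # (The request is translated into an HTTP connection scope, as described
+        # by the ASGI specification: https://asgi.readthedocs.io/en/latest/specs/www.html)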
- scheme, host, port, full_path = url - path, _, query = full_path.partition(b"?") scope = { "type": "http", "asgi": {"version": "3.0"}, "http_version": "1.1", - "method": method.decode(), - "headers": [(k.lower(), v) for (k, v) in headers], - "scheme": scheme.decode("ascii"), - "path": unquote(path.decode("ascii")), - "raw_path": path, - "query_string": query, - "server": (host.decode("ascii"), port), + "method": request.method, + "headers": [(k.lower(), v) for (k, v) in request.headers.raw], + "scheme": request.url.scheme, + "path": request.url.path, + "raw_path": request.url.raw_path, + "query_string": request.url.query, + "server": (request.url.host, request.url.port), "client": self.client, "root_path": self.root_path, } # Request. - request_body_chunks = stream.__aiter__() + request_body_chunks = request.stream.__aiter__() request_complete = False # Response. @@ -118,7 +123,7 @@ async def handle_async_request( # ASGI callables. - async def receive() -> dict: + async def receive() -> typing.Dict[str, typing.Any]: nonlocal request_complete if request_complete: @@ -132,7 +137,7 @@ async def receive() -> dict: return {"type": "http.request", "body": b"", "more_body": False} return {"type": "http.request", "body": body, "more_body": True} - async def send(message: dict) -> None: + async def send(message: typing.Dict[str, typing.Any]) -> None: nonlocal status_code, response_headers, response_started if message["type"] == "http.response.start": @@ -147,7 +152,7 @@ async def send(message: dict) -> None: body = message.get("body", b"") more_body = message.get("more_body", False) - if body and method != b"HEAD": + if body and request.method != "HEAD": body_parts.append(body) if not more_body: @@ -155,15 +160,20 @@ async def send(message: dict) -> None: try: await self.app(scope, receive, send) - except Exception: - if self.raise_app_exceptions or not response_complete.is_set(): + except Exception: # noqa: PIE-786 + if self.raise_app_exceptions: raise + response_complete.set() + if status_code is None: + status_code = 500 + if response_headers is None: + response_headers = {} + assert response_complete.is_set() assert status_code is not None assert response_headers is not None stream = ASGIResponseStream(body_parts) - extensions = {} - return (status_code, response_headers, stream, extensions) + return Response(status_code, headers=response_headers, stream=stream) diff --git a/packages/httpx/_transports/base.py b/packages/httpx/_transports/base.py index eb5192697..f6fdfe694 100644 --- a/packages/httpx/_transports/base.py +++ b/packages/httpx/_transports/base.py @@ -1,153 +1,58 @@ import typing from types import TracebackType +from .._models import Request, Response + T = typing.TypeVar("T", bound="BaseTransport") A = typing.TypeVar("A", bound="AsyncBaseTransport") -class SyncByteStream: - def __iter__(self) -> typing.Iterator[bytes]: - raise NotImplementedError( - "The '__iter__' method must be implemented." - ) # pragma: nocover - yield b"" # pragma: nocover - - def close(self) -> None: - """ - Subclasses can override this method to release any network resources - after a request/response cycle is complete. - - Streaming cases should use a `try...finally` block to ensure that - the stream `close()` method is always called. - - Example: - - status_code, headers, stream, extensions = transport.handle_request(...) - try: - ... 
-            finally:
-                stream.close()
-        """
-
-    def read(self) -> bytes:
-        """
-        Simple cases can use `.read()` as a convience method for consuming
-        the entire stream and then closing it.
-
-        Example:
-
-            status_code, headers, stream, extensions = transport.handle_request(...)
-            body = stream.read()
-        """
-        try:
-            return b"".join([part for part in self])
-        finally:
-            self.close()
-
-
-class AsyncByteStream:
-    async def __aiter__(self) -> typing.AsyncIterator[bytes]:
-        raise NotImplementedError(
-            "The '__aiter__' method must be implemented."
-        )  # pragma: nocover
-        yield b""  # pragma: nocover
-
-    async def aclose(self) -> None:
-        pass
-
-    async def aread(self) -> bytes:
-        try:
-            return b"".join([part async for part in self])
-        finally:
-            await self.aclose()
-
-
 class BaseTransport:
     def __enter__(self: T) -> T:
         return self

     def __exit__(
         self,
-        exc_type: typing.Type[BaseException] = None,
-        exc_value: BaseException = None,
-        traceback: TracebackType = None,
+        exc_type: typing.Optional[typing.Type[BaseException]] = None,
+        exc_value: typing.Optional[BaseException] = None,
+        traceback: typing.Optional[TracebackType] = None,
     ) -> None:
         self.close()

-    def handle_request(
-        self,
-        method: bytes,
-        url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
-        headers: typing.List[typing.Tuple[bytes, bytes]],
-        stream: SyncByteStream,
-        extensions: dict,
-    ) -> typing.Tuple[
-        int, typing.List[typing.Tuple[bytes, bytes]], SyncByteStream, dict
-    ]:
+    def handle_request(self, request: Request) -> Response:
         """
         Send a single HTTP request and return a response.

-        At this layer of API we're simply using plain primitives. No `Request` or
-        `Response` models, no fancy `URL` or `Header` handling. This strict point
-        of cut-off provides a clear design seperation between the HTTPX API,
-        and the low-level network handling.
-
         Developers shouldn't typically ever need to call into this API directly,
         since the Client class provides all the higher level user-facing API
         niceties.

-        In order to properly release any network resources, the response stream
-        should *either* be consumed immediately, with a call to `stream.read()`,
-        or else the `handle_request` call should be followed with a try/finally
-        block to ensuring the stream is always closed.
+        In order to properly release any network resources, the response
+        stream should *either* be consumed immediately, with a call to
+        `response.stream.read()`, or else the `handle_request` call should
+        be followed with a try/finally block to ensure the stream is
+        always closed.

         Example usage:

             with httpx.HTTPTransport() as transport:
-                status_code, headers, stream, extensions = transport.handle_request(
-                    method=b'GET',
-                    url=(b'https', b'www.example.com', 443, b'/'),
-                    headers=[(b'Host', b'www.example.com')],
-                    stream=[],
-                    extensions={}
+                req = httpx.Request(
+                    method=b"GET",
+                    url=(b"https", b"www.example.com", 443, b"/"),
+                    headers=[(b"Host", b"www.example.com")],
                 )
-                body = stream.read()
-                print(status_code, headers, body)
-
-        Arguments:
-
-        method: The request method as bytes. Eg. b'GET'.
-        url: The components of the request URL, as a tuple of `(scheme, host, port, target)`.
-            The target will usually be the URL path, but also allows for alternative
-            formulations, such as proxy requests which include the complete URL in
-            the target portion of the HTTP request, or for "OPTIONS *" requests, which
-            cannot be expressed in a URL string.
-        headers: The request headers as a list of byte pairs.
-        stream: The request body as a bytes iterator.
- extensions: An open ended dictionary, including optional extensions to the - core request/response API. Keys may include: - timeout: A dictionary of str:Optional[float] timeout values. - May include values for 'connect', 'read', 'write', or 'pool'. - - Returns a tuple of: - - status_code: The response status code as an integer. Should be in the range 1xx-5xx. - headers: The response headers as a list of byte pairs. - stream: The response body as a bytes iterator. - extensions: An open ended dictionary, including optional extensions to the - core request/response API. Keys are plain strings, and may include: - reason_phrase: The reason-phrase of the HTTP response, as bytes. Eg b'OK'. - HTTP/2 onwards does not include a reason phrase on the wire. - When no key is included, a default based on the status code may - be used. An empty-string reason phrase should not be substituted - for a default, as it indicates the server left the portion blank - eg. the leading response bytes were b"HTTP/1.1 200 ". - http_version: The HTTP version, as bytes. Eg. b"HTTP/1.1". - When no http_version key is included, HTTP/1.1 may be assumed. + resp = transport.handle_request(req) + body = resp.stream.read() + print(resp.status_code, resp.headers, body) + + + Takes a `Request` instance as the only argument. + + Returns a `Response` instance. """ raise NotImplementedError( "The 'handle_request' method must be implemented." - ) # pragma: nocover + ) # pragma: no cover def close(self) -> None: pass @@ -159,25 +64,19 @@ async def __aenter__(self: A) -> A: async def __aexit__( self, - exc_type: typing.Type[BaseException] = None, - exc_value: BaseException = None, - traceback: TracebackType = None, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[TracebackType] = None, ) -> None: await self.aclose() async def handle_async_request( self, - method: bytes, - url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes], - headers: typing.List[typing.Tuple[bytes, bytes]], - stream: AsyncByteStream, - extensions: dict, - ) -> typing.Tuple[ - int, typing.List[typing.Tuple[bytes, bytes]], AsyncByteStream, dict - ]: + request: Request, + ) -> Response: raise NotImplementedError( "The 'handle_async_request' method must be implemented." - ) # pragma: nocover + ) # pragma: no cover async def aclose(self) -> None: pass diff --git a/packages/httpx/_transports/default.py b/packages/httpx/_transports/default.py index ae6c2d177..7dba5b820 100644 --- a/packages/httpx/_transports/default.py +++ b/packages/httpx/_transports/default.py @@ -6,11 +6,10 @@ * uds: str * local_address: str * retries: int -* backend: str ("auto", "asyncio", "trio", "curio", "anyio", "sync") Example usages... -# Disable HTTP/2 on a single specfic domain. +# Disable HTTP/2 on a single specific domain. 
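+# (The more specific "all://*example.org" mount takes priority for that domain,
+# and its transport leaves HTTP/2 at the default of disabled.)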
mounts = { "all://": httpx.HTTPTransport(http2=True), "all://*example.org": httpx.HTTPTransport() @@ -32,7 +31,6 @@ from .._config import DEFAULT_LIMITS, Limits, Proxy, create_ssl_context from .._exceptions import ( - CloseError, ConnectError, ConnectTimeout, LocalProtocolError, @@ -48,18 +46,25 @@ WriteError, WriteTimeout, ) -from .._types import CertTypes, VerifyTypes -from .base import AsyncBaseTransport, AsyncByteStream, BaseTransport, SyncByteStream +from .._models import Request, Response +from .._types import AsyncByteStream, CertTypes, SyncByteStream, VerifyTypes +from .base import AsyncBaseTransport, BaseTransport T = typing.TypeVar("T", bound="HTTPTransport") A = typing.TypeVar("A", bound="AsyncHTTPTransport") +SOCKET_OPTION = typing.Union[ + typing.Tuple[int, int, int], + typing.Tuple[int, int, typing.Union[bytes, bytearray]], + typing.Tuple[int, int, None, int], +] + @contextlib.contextmanager def map_httpcore_exceptions() -> typing.Iterator[None]: try: yield - except Exception as exc: + except Exception as exc: # noqa: PIE-786 mapped_exc = None for from_exc, to_exc in HTTPCORE_EXC_MAP.items(): @@ -71,7 +76,7 @@ def map_httpcore_exceptions() -> typing.Iterator[None]: if mapped_exc is None or issubclass(to_exc, mapped_exc): mapped_exc = to_exc - if mapped_exc is None: # pragma: nocover + if mapped_exc is None: # pragma: no cover raise message = str(exc) @@ -88,7 +93,6 @@ def map_httpcore_exceptions() -> typing.Iterator[None]: httpcore.ConnectError: ConnectError, httpcore.ReadError: ReadError, httpcore.WriteError: WriteError, - httpcore.CloseError: CloseError, httpcore.ProxyError: ProxyError, httpcore.UnsupportedProtocol: UnsupportedProtocol, httpcore.ProtocolError: ProtocolError, @@ -98,7 +102,7 @@ def map_httpcore_exceptions() -> typing.Iterator[None]: class ResponseStream(SyncByteStream): - def __init__(self, httpcore_stream: httpcore.SyncByteStream): + def __init__(self, httpcore_stream: typing.Iterable[bytes]): self._httpcore_stream = httpcore_stream def __iter__(self) -> typing.Iterator[bytes]: @@ -107,7 +111,7 @@ def __iter__(self) -> typing.Iterator[bytes]: yield part def close(self) -> None: - with map_httpcore_exceptions(): + if hasattr(self._httpcore_stream, "close"): self._httpcore_stream.close() @@ -115,21 +119,21 @@ class HTTPTransport(BaseTransport): def __init__( self, verify: VerifyTypes = True, - cert: CertTypes = None, + cert: typing.Optional[CertTypes] = None, http1: bool = True, http2: bool = False, limits: Limits = DEFAULT_LIMITS, trust_env: bool = True, - proxy: Proxy = None, - uds: str = None, - local_address: str = None, + proxy: typing.Optional[Proxy] = None, + uds: typing.Optional[str] = None, + local_address: typing.Optional[str] = None, retries: int = 0, - backend: str = "sync", + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> None: ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) if proxy is None: - self._pool = httpcore.SyncConnectionPool( + self._pool = httpcore.ConnectionPool( ssl_context=ssl_context, max_connections=limits.max_connections, max_keepalive_connections=limits.max_keepalive_connections, @@ -139,19 +143,54 @@ def __init__( uds=uds, local_address=local_address, retries=retries, - backend=backend, + socket_options=socket_options, ) - else: - self._pool = httpcore.SyncHTTPProxy( - proxy_url=proxy.url.raw, + elif proxy.url.scheme in ("http", "https"): + self._pool = httpcore.HTTPProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + 
port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, proxy_headers=proxy.headers.raw, - proxy_mode=proxy.mode, + ssl_context=ssl_context, + proxy_ssl_context=proxy.ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + socket_options=socket_options, + ) + elif proxy.url.scheme == "socks5": + try: + import socksio # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using SOCKS proxy, but the 'socksio' package is not installed. " + "Make sure to install httpx using `pip install httpx[socks]`." + ) from None + + self._pool = httpcore.SOCKSProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, ssl_context=ssl_context, max_connections=limits.max_connections, max_keepalive_connections=limits.max_keepalive_connections, keepalive_expiry=limits.keepalive_expiry, + http1=http1, http2=http2, - backend=backend, + ) + else: # pragma: no cover + raise ValueError( + f"Proxy protocol must be either 'http', 'https', or 'socks5', but got {proxy.url.scheme!r}." ) def __enter__(self: T) -> T: # Use generics for subclass support. @@ -160,42 +199,49 @@ def __enter__(self: T) -> T: # Use generics for subclass support. def __exit__( self, - exc_type: typing.Type[BaseException] = None, - exc_value: BaseException = None, - traceback: TracebackType = None, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[TracebackType] = None, ) -> None: with map_httpcore_exceptions(): self._pool.__exit__(exc_type, exc_value, traceback) def handle_request( self, - method: bytes, - url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes], - headers: typing.List[typing.Tuple[bytes, bytes]], - stream: SyncByteStream, - extensions: dict, - ) -> typing.Tuple[ - int, typing.List[typing.Tuple[bytes, bytes]], SyncByteStream, dict - ]: + request: Request, + ) -> Response: + assert isinstance(request.stream, SyncByteStream) + + req = httpcore.Request( + method=request.method, + url=httpcore.URL( + scheme=request.url.raw_scheme, + host=request.url.raw_host, + port=request.url.port, + target=request.url.raw_path, + ), + headers=request.headers.raw, + content=request.stream, + extensions=request.extensions, + ) with map_httpcore_exceptions(): - status_code, headers, byte_stream, extensions = self._pool.handle_request( - method=method, - url=url, - headers=headers, - stream=httpcore.IteratorByteStream(iter(stream)), - extensions=extensions, - ) + resp = self._pool.handle_request(req) - stream = ResponseStream(byte_stream) + assert isinstance(resp.stream, typing.Iterable) - return status_code, headers, stream, extensions + return Response( + status_code=resp.status, + headers=resp.headers, + stream=ResponseStream(resp.stream), + extensions=resp.extensions, + ) def close(self) -> None: self._pool.close() class AsyncResponseStream(AsyncByteStream): - def __init__(self, httpcore_stream: httpcore.AsyncByteStream): + def __init__(self, httpcore_stream: typing.AsyncIterable[bytes]): self._httpcore_stream = httpcore_stream async def __aiter__(self) -> typing.AsyncIterator[bytes]: @@ -204,7 +250,7 @@ async def __aiter__(self) -> typing.AsyncIterator[bytes]: yield part async def aclose(self) -> None: - with map_httpcore_exceptions(): + if 
hasattr(self._httpcore_stream, "aclose"): await self._httpcore_stream.aclose() @@ -212,16 +258,16 @@ class AsyncHTTPTransport(AsyncBaseTransport): def __init__( self, verify: VerifyTypes = True, - cert: CertTypes = None, + cert: typing.Optional[CertTypes] = None, http1: bool = True, http2: bool = False, limits: Limits = DEFAULT_LIMITS, trust_env: bool = True, - proxy: Proxy = None, - uds: str = None, - local_address: str = None, + proxy: typing.Optional[Proxy] = None, + uds: typing.Optional[str] = None, + local_address: typing.Optional[str] = None, retries: int = 0, - backend: str = "auto", + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> None: ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) @@ -236,19 +282,53 @@ def __init__( uds=uds, local_address=local_address, retries=retries, - backend=backend, + socket_options=socket_options, ) - else: + elif proxy.url.scheme in ("http", "https"): self._pool = httpcore.AsyncHTTPProxy( - proxy_url=proxy.url.raw, + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, proxy_headers=proxy.headers.raw, - proxy_mode=proxy.mode, ssl_context=ssl_context, max_connections=limits.max_connections, max_keepalive_connections=limits.max_keepalive_connections, keepalive_expiry=limits.keepalive_expiry, + http1=http1, http2=http2, - backend=backend, + socket_options=socket_options, + ) + elif proxy.url.scheme == "socks5": + try: + import socksio # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using SOCKS proxy, but the 'socksio' package is not installed. " + "Make sure to install httpx using `pip install httpx[socks]`." + ) from None + + self._pool = httpcore.AsyncSOCKSProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + ) + else: # pragma: no cover + raise ValueError( + f"Proxy protocol must be either 'http', 'https', or 'socks5', but got {proxy.url.scheme!r}." ) async def __aenter__(self: A) -> A: # Use generics for subclass support. @@ -257,40 +337,42 @@ async def __aenter__(self: A) -> A: # Use generics for subclass support. 
async def __aexit__( self, - exc_type: typing.Type[BaseException] = None, - exc_value: BaseException = None, - traceback: TracebackType = None, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[TracebackType] = None, ) -> None: with map_httpcore_exceptions(): await self._pool.__aexit__(exc_type, exc_value, traceback) async def handle_async_request( self, - method: bytes, - url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes], - headers: typing.List[typing.Tuple[bytes, bytes]], - stream: AsyncByteStream, - extensions: dict, - ) -> typing.Tuple[ - int, typing.List[typing.Tuple[bytes, bytes]], AsyncByteStream, dict - ]: + request: Request, + ) -> Response: + assert isinstance(request.stream, AsyncByteStream) + + req = httpcore.Request( + method=request.method, + url=httpcore.URL( + scheme=request.url.raw_scheme, + host=request.url.raw_host, + port=request.url.port, + target=request.url.raw_path, + ), + headers=request.headers.raw, + content=request.stream, + extensions=request.extensions, + ) with map_httpcore_exceptions(): - ( - status_code, - headers, - byte_stream, - extensions, - ) = await self._pool.handle_async_request( - method=method, - url=url, - headers=headers, - stream=httpcore.AsyncIteratorByteStream(stream.__aiter__()), - extensions=extensions, - ) + resp = await self._pool.handle_async_request(req) - stream = AsyncResponseStream(byte_stream) + assert isinstance(resp.stream, typing.AsyncIterable) - return status_code, headers, stream, extensions + return Response( + status_code=resp.status, + headers=resp.headers, + stream=AsyncResponseStream(resp.stream), + extensions=resp.extensions, + ) async def aclose(self) -> None: await self._pool.aclose() diff --git a/packages/httpx/_transports/mock.py b/packages/httpx/_transports/mock.py index 8d59b7382..82043da2d 100644 --- a/packages/httpx/_transports/mock.py +++ b/packages/httpx/_transports/mock.py @@ -1,70 +1,38 @@ -import asyncio import typing -from .._models import Request -from .base import AsyncBaseTransport, AsyncByteStream, BaseTransport, SyncByteStream +from .._models import Request, Response +from .base import AsyncBaseTransport, BaseTransport + +SyncHandler = typing.Callable[[Request], Response] +AsyncHandler = typing.Callable[[Request], typing.Coroutine[None, None, Response]] class MockTransport(AsyncBaseTransport, BaseTransport): - def __init__(self, handler: typing.Callable) -> None: + def __init__(self, handler: typing.Union[SyncHandler, AsyncHandler]) -> None: self.handler = handler def handle_request( self, - method: bytes, - url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes], - headers: typing.List[typing.Tuple[bytes, bytes]], - stream: SyncByteStream, - extensions: dict, - ) -> typing.Tuple[ - int, typing.List[typing.Tuple[bytes, bytes]], SyncByteStream, dict - ]: - request = Request( - method=method, - url=url, - headers=headers, - stream=stream, - ) + request: Request, + ) -> Response: request.read() response = self.handler(request) - return ( - response.status_code, - response.headers.raw, - response.stream, - response.extensions, - ) + if not isinstance(response, Response): # pragma: no cover + raise TypeError("Cannot use an async handler in a sync Client") + return response async def handle_async_request( self, - method: bytes, - url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes], - headers: typing.List[typing.Tuple[bytes, bytes]], - stream: AsyncByteStream, - extensions: dict, - ) -> 
typing.Tuple[ - int, typing.List[typing.Tuple[bytes, bytes]], AsyncByteStream, dict - ]: - request = Request( - method=method, - url=url, - headers=headers, - stream=stream, - ) + request: Request, + ) -> Response: await request.aread() - response = self.handler(request) # Allow handler to *optionally* be an `async` function. # If it is, then the `response` variable need to be awaited to actually # return the result. - # https://simonwillison.net/2020/Sep/2/await-me-maybe/ - if asyncio.iscoroutine(response): + if not isinstance(response, Response): response = await response - return ( - response.status_code, - response.headers.raw, - response.stream, - response.extensions, - ) + return response diff --git a/packages/httpx/_transports/wsgi.py b/packages/httpx/_transports/wsgi.py index c8266c739..a23d42c41 100644 --- a/packages/httpx/_transports/wsgi.py +++ b/packages/httpx/_transports/wsgi.py @@ -1,12 +1,20 @@ import io import itertools +import sys import typing -from urllib.parse import unquote -from .base import BaseTransport, SyncByteStream +from .._models import Request, Response +from .._types import SyncByteStream +from .base import BaseTransport +if typing.TYPE_CHECKING: + from _typeshed import OptExcInfo # pragma: no cover + from _typeshed.wsgi import WSGIApplication # pragma: no cover -def _skip_leading_empty_chunks(body: typing.Iterable) -> typing.Iterable: +_T = typing.TypeVar("_T") + + +def _skip_leading_empty_chunks(body: typing.Iterable[_T]) -> typing.Iterable[_T]: body = iter(body) for chunk in body: if chunk: @@ -16,12 +24,17 @@ def _skip_leading_empty_chunks(body: typing.Iterable) -> typing.Iterable: class WSGIByteStream(SyncByteStream): def __init__(self, result: typing.Iterable[bytes]) -> None: + self._close = getattr(result, "close", None) self._result = _skip_leading_empty_chunks(result) def __iter__(self) -> typing.Iterator[bytes]: for part in self._result: yield part + def close(self) -> None: + if self._close is not None: + self._close() + class WSGITransport(BaseTransport): """ @@ -47,61 +60,52 @@ class WSGITransport(BaseTransport): Arguments: - * `app` - The ASGI application. + * `app` - The WSGI application. * `raise_app_exceptions` - Boolean indicating if exceptions in the application should be raised. Default to `True`. Can be set to `False` for use cases such as testing the content of a client 500 response. - * `script_name` - The root path on which the ASGI application should be mounted. + * `script_name` - The root path on which the WSGI application should be mounted. * `remote_addr` - A string indicating the client IP of incoming requests. 
``` """ def __init__( self, - app: typing.Callable, + app: "WSGIApplication", raise_app_exceptions: bool = True, script_name: str = "", remote_addr: str = "127.0.0.1", + wsgi_errors: typing.Optional[typing.TextIO] = None, ) -> None: self.app = app self.raise_app_exceptions = raise_app_exceptions self.script_name = script_name self.remote_addr = remote_addr + self.wsgi_errors = wsgi_errors - def handle_request( - self, - method: bytes, - url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes], - headers: typing.List[typing.Tuple[bytes, bytes]], - stream: SyncByteStream, - extensions: dict, - ) -> typing.Tuple[ - int, typing.List[typing.Tuple[bytes, bytes]], SyncByteStream, dict - ]: - wsgi_input = io.BytesIO(b"".join(stream)) - - scheme, host, port, full_path = url - path, _, query = full_path.partition(b"?") - if port is None: - port = {b"http": 80, b"https": 443}[scheme] + def handle_request(self, request: Request) -> Response: + request.read() + wsgi_input = io.BytesIO(request.content) + port = request.url.port or {"http": 80, "https": 443}[request.url.scheme] environ = { "wsgi.version": (1, 0), - "wsgi.url_scheme": scheme.decode("ascii"), + "wsgi.url_scheme": request.url.scheme, "wsgi.input": wsgi_input, - "wsgi.errors": io.BytesIO(), + "wsgi.errors": self.wsgi_errors or sys.stderr, "wsgi.multithread": True, "wsgi.multiprocess": False, "wsgi.run_once": False, - "REQUEST_METHOD": method.decode(), + "REQUEST_METHOD": request.method, "SCRIPT_NAME": self.script_name, - "PATH_INFO": unquote(path.decode("ascii")), - "QUERY_STRING": query.decode("ascii"), - "SERVER_NAME": host.decode("ascii"), + "PATH_INFO": request.url.path, + "QUERY_STRING": request.url.query.decode("ascii"), + "SERVER_NAME": request.url.host, "SERVER_PORT": str(port), + "SERVER_PROTOCOL": "HTTP/1.1", "REMOTE_ADDR": self.remote_addr, } - for header_key, header_value in headers: + for header_key, header_value in request.headers.raw: key = header_key.decode("ascii").upper().replace("-", "_") if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): key = "HTTP_" + key @@ -112,12 +116,15 @@ def handle_request( seen_exc_info = None def start_response( - status: str, response_headers: list, exc_info: typing.Any = None - ) -> None: + status: str, + response_headers: typing.List[typing.Tuple[str, str]], + exc_info: typing.Optional["OptExcInfo"] = None, + ) -> typing.Callable[[bytes], typing.Any]: nonlocal seen_status, seen_response_headers, seen_exc_info seen_status = status seen_response_headers = response_headers seen_exc_info = exc_info + return lambda _: None result = self.app(environ, start_response) @@ -125,7 +132,7 @@ def start_response( assert seen_status is not None assert seen_response_headers is not None - if seen_exc_info and self.raise_app_exceptions: + if seen_exc_info and seen_exc_info[0] and self.raise_app_exceptions: raise seen_exc_info[1] status_code = int(seen_status.split()[0]) @@ -133,6 +140,5 @@ def start_response( (key.encode("ascii"), value.encode("ascii")) for key, value in seen_response_headers ] - extensions = {} - return (status_code, headers, stream, extensions) + return Response(status_code, headers=headers, stream=stream) diff --git a/packages/httpx/_types.py b/packages/httpx/_types.py index 75bb9006c..83cf35a32 100644 --- a/packages/httpx/_types.py +++ b/packages/httpx/_types.py @@ -7,12 +7,17 @@ from typing import ( IO, TYPE_CHECKING, + Any, AsyncIterable, + AsyncIterator, Callable, Dict, Iterable, + Iterator, List, Mapping, + MutableMapping, + NamedTuple, Optional, Sequence, Tuple, @@ -22,12 +27,21 @@ 
if TYPE_CHECKING: # pragma: no cover from ._auth import Auth # noqa: F401 from ._config import Proxy, Timeout # noqa: F401 - from ._models import URL, Cookies, Headers, QueryParams, Request # noqa: F401 + from ._models import Cookies, Headers, Request # noqa: F401 + from ._urls import URL, QueryParams # noqa: F401 PrimitiveData = Optional[Union[str, int, float, bool]] -RawURL = Tuple[bytes, bytes, Optional[int], bytes] +RawURL = NamedTuple( + "RawURL", + [ + ("raw_scheme", bytes), + ("raw_host", bytes), + ("port", Optional[int]), + ("raw_path", bytes), + ], +) URLTypes = Union["URL", str] @@ -38,13 +52,12 @@ Tuple[Tuple[str, PrimitiveData], ...], str, bytes, - None, ] HeaderTypes = Union[ "Headers", - Dict[str, str], - Dict[bytes, bytes], + Mapping[str, str], + Mapping[bytes, bytes], Sequence[Tuple[str, str]], Sequence[Tuple[bytes, bytes]], ] @@ -71,21 +84,50 @@ Tuple[Union[str, bytes], Union[str, bytes]], Callable[["Request"], "Request"], "Auth", - None, ] RequestContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] ResponseContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +ResponseExtensions = MutableMapping[str, Any] -RequestData = dict +RequestData = Mapping[str, Any] -FileContent = Union[IO[str], IO[bytes], str, bytes] +FileContent = Union[IO[bytes], bytes, str] FileTypes = Union[ - # file (or text) + # file (or bytes) FileContent, - # (filename, file (or text)) + # (filename, file (or bytes)) Tuple[Optional[str], FileContent], - # (filename, file (or text), content_type) + # (filename, file (or bytes), content_type) Tuple[Optional[str], FileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], ] RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] + +RequestExtensions = MutableMapping[str, Any] + + +class SyncByteStream: + def __iter__(self) -> Iterator[bytes]: + raise NotImplementedError( + "The '__iter__' method must be implemented." + ) # pragma: no cover + yield b"" # pragma: no cover + + def close(self) -> None: + """ + Subclasses can override this method to release any network resources + after a request/response cycle is complete. + """ + + +class AsyncByteStream: + async def __aiter__(self) -> AsyncIterator[bytes]: + raise NotImplementedError( + "The '__aiter__' method must be implemented." + ) # pragma: no cover + yield b"" # pragma: no cover + + async def aclose(self) -> None: + pass diff --git a/packages/httpx/_urlparse.py b/packages/httpx/_urlparse.py new file mode 100644 index 000000000..e1ba8dcdb --- /dev/null +++ b/packages/httpx/_urlparse.py @@ -0,0 +1,464 @@ +""" +An implementation of `urlparse` that provides URL validation and normalization +as described by RFC3986. + +We rely on this implementation rather than the one in Python's stdlib, because: + +* It provides more complete URL validation. +* It properly differentiates between an empty querystring and an absent querystring, + to distinguish URLs with a trailing '?'. +* It handles scheme, hostname, port, and path normalization. +* It supports IDNA hostnames, normalizing them to their encoded form. +* The API supports passing individual components, as well as the complete URL string. + +Previously we relied on the excellent `rfc3986` package to handle URL parsing and +validation, but this module provides a simpler alternative, with less indirection +required. 
+""" +import ipaddress +import re +import typing + +import idna + +from ._exceptions import InvalidURL + +MAX_URL_LENGTH = 65536 + +# https://datatracker.ietf.org/doc/html/rfc3986.html#section-2.3 +UNRESERVED_CHARACTERS = ( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~" +) +SUB_DELIMS = "!$&'()*+,;=" + +PERCENT_ENCODED_REGEX = re.compile("%[A-Fa-f0-9]{2}") + + +# {scheme}: (optional) +# //{authority} (optional) +# {path} +# ?{query} (optional) +# #{fragment} (optional) +URL_REGEX = re.compile( + ( + r"(?:(?P{scheme}):)?" + r"(?://(?P{authority}))?" + r"(?P{path})" + r"(?:\?(?P{query}))?" + r"(?:#(?P{fragment}))?" + ).format( + scheme="([a-zA-Z][a-zA-Z0-9+.-]*)?", + authority="[^/?#]*", + path="[^?#]*", + query="[^#]*", + fragment=".*", + ) +) + +# {userinfo}@ (optional) +# {host} +# :{port} (optional) +AUTHORITY_REGEX = re.compile( + ( + r"(?:(?P{userinfo})@)?" r"(?P{host})" r":?(?P{port})?" + ).format( + userinfo="[^@]*", # Any character sequence not including '@'. + host="(\\[.*\\]|[^:]*)", # Either any character sequence not including ':', + # or an IPv6 address enclosed within square brackets. + port=".*", # Any character sequence. + ) +) + + +# If we call urlparse with an individual component, then we need to regex +# validate that component individually. +# Note that we're duplicating the same strings as above. Shock! Horror!! +COMPONENT_REGEX = { + "scheme": re.compile("([a-zA-Z][a-zA-Z0-9+.-]*)?"), + "authority": re.compile("[^/?#]*"), + "path": re.compile("[^?#]*"), + "query": re.compile("[^#]*"), + "fragment": re.compile(".*"), + "userinfo": re.compile("[^@]*"), + "host": re.compile("(\\[.*\\]|[^:]*)"), + "port": re.compile(".*"), +} + + +# We use these simple regexs as a first pass before handing off to +# the stdlib 'ipaddress' module for IP address validation. +IPv4_STYLE_HOSTNAME = re.compile(r"^[0-9]+.[0-9]+.[0-9]+.[0-9]+$") +IPv6_STYLE_HOSTNAME = re.compile(r"^\[.*\]$") + + +class ParseResult(typing.NamedTuple): + scheme: str + userinfo: str + host: str + port: typing.Optional[int] + path: str + query: typing.Optional[str] + fragment: typing.Optional[str] + + @property + def authority(self) -> str: + return "".join( + [ + f"{self.userinfo}@" if self.userinfo else "", + f"[{self.host}]" if ":" in self.host else self.host, + f":{self.port}" if self.port is not None else "", + ] + ) + + @property + def netloc(self) -> str: + return "".join( + [ + f"[{self.host}]" if ":" in self.host else self.host, + f":{self.port}" if self.port is not None else "", + ] + ) + + def copy_with(self, **kwargs: typing.Optional[str]) -> "ParseResult": + if not kwargs: + return self + + defaults = { + "scheme": self.scheme, + "authority": self.authority, + "path": self.path, + "query": self.query, + "fragment": self.fragment, + } + defaults.update(kwargs) + return urlparse("", **defaults) + + def __str__(self) -> str: + authority = self.authority + return "".join( + [ + f"{self.scheme}:" if self.scheme else "", + f"//{authority}" if authority else "", + self.path, + f"?{self.query}" if self.query is not None else "", + f"#{self.fragment}" if self.fragment is not None else "", + ] + ) + + +def urlparse(url: str = "", **kwargs: typing.Optional[str]) -> ParseResult: + # Initial basic checks on allowable URLs. + # --------------------------------------- + + # Hard limit the maximum allowable URL length. + if len(url) > MAX_URL_LENGTH: + raise InvalidURL("URL too long") + + # If a URL includes any ASCII control characters including \t, \r, \n, + # then treat it as invalid. 
+    if any(char.isascii() and not char.isprintable() for char in url):
+        raise InvalidURL("Invalid non-printable ASCII character in URL")
+
+    # Some keyword arguments require special handling.
+    # ------------------------------------------------
+
+    # Coerce "port" to a string, if it is provided as an integer.
+    if "port" in kwargs:
+        port = kwargs["port"]
+        kwargs["port"] = str(port) if isinstance(port, int) else port
+
+    # Replace "netloc" with "host" and "port".
+    if "netloc" in kwargs:
+        netloc = kwargs.pop("netloc") or ""
+        kwargs["host"], _, kwargs["port"] = netloc.partition(":")
+
+    # Replace "username" and/or "password" with "userinfo".
+    if "username" in kwargs or "password" in kwargs:
+        username = quote(kwargs.pop("username", "") or "")
+        password = quote(kwargs.pop("password", "") or "")
+        kwargs["userinfo"] = f"{username}:{password}" if password else username
+
+    # Replace "raw_path" with "path" and "query".
+    if "raw_path" in kwargs:
+        raw_path = kwargs.pop("raw_path") or ""
+        kwargs["path"], separator, kwargs["query"] = raw_path.partition("?")
+        if not separator:
+            kwargs["query"] = None
+
+    # Ensure that IPv6 "host" addresses are always escaped with "[...]".
+    if "host" in kwargs:
+        host = kwargs.get("host") or ""
+        if ":" in host and not (host.startswith("[") and host.endswith("]")):
+            kwargs["host"] = f"[{host}]"
+
+    # If any keyword arguments are provided, ensure they are valid.
+    # -------------------------------------------------------------
+
+    for key, value in kwargs.items():
+        if value is not None:
+            if len(value) > MAX_URL_LENGTH:
+                raise InvalidURL(f"URL component '{key}' too long")
+
+            # If a component includes any ASCII control characters including \t, \r, \n,
+            # then treat it as invalid.
+            if any(char.isascii() and not char.isprintable() for char in value):
+                raise InvalidURL(
+                    f"Invalid non-printable ASCII character in URL component '{key}'"
+                )
+
+            # Ensure that the keyword argument matches the component's validation regex.
+            if not COMPONENT_REGEX[key].fullmatch(value):
+                raise InvalidURL(f"Invalid URL component '{key}'")
+
+    # The URL_REGEX will always match, but may have empty components.
+    url_match = URL_REGEX.match(url)
+    assert url_match is not None
+    url_dict = url_match.groupdict()
+
+    # * 'scheme', 'authority', and 'path' may be empty strings.
+    # * 'query' may be 'None', indicating no trailing "?" portion.
+    #   Any string, including the empty string, indicates a trailing "?".
+    # * 'fragment' may be 'None', indicating no trailing "#" portion.
+    #   Any string, including the empty string, indicates a trailing "#".
+    scheme = kwargs.get("scheme", url_dict["scheme"]) or ""
+    authority = kwargs.get("authority", url_dict["authority"]) or ""
+    path = kwargs.get("path", url_dict["path"]) or ""
+    query = kwargs.get("query", url_dict["query"])
+    fragment = kwargs.get("fragment", url_dict["fragment"])
+
+    # The AUTHORITY_REGEX will always match, but may have empty components.
+    authority_match = AUTHORITY_REGEX.match(authority)
+    assert authority_match is not None
+    authority_dict = authority_match.groupdict()
+
+    # * 'userinfo' and 'host' may be empty strings.
+    # * 'port' may be 'None'.
+    userinfo = kwargs.get("userinfo", authority_dict["userinfo"]) or ""
+    host = kwargs.get("host", authority_dict["host"]) or ""
+    port = kwargs.get("port", authority_dict["port"])
+
+    # Normalize and validate each component.
+    # We end up with a parsed representation of the URL,
+    # with components that are plain ASCII bytestrings.
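+    #
+    # For example, roughly:
+    #
+    #   urlparse("http://EXAMPLE.com:80/a/../b")
+    #   == ParseResult("http", "", "example.com", None, "/b", None, None)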
+ parsed_scheme: str = scheme.lower() + parsed_userinfo: str = quote(userinfo, safe=SUB_DELIMS + ":") + parsed_host: str = encode_host(host) + parsed_port: typing.Optional[int] = normalize_port(port, scheme) + + has_scheme = parsed_scheme != "" + has_authority = ( + parsed_userinfo != "" or parsed_host != "" or parsed_port is not None + ) + validate_path(path, has_scheme=has_scheme, has_authority=has_authority) + if has_authority: + path = normalize_path(path) + + # The GEN_DELIMS set is... : / ? # [ ] @ + # These do not need to be percent-quoted unless they serve as delimiters for the + # specific component. + + # For 'path' we need to drop ? and # from the GEN_DELIMS set. + parsed_path: str = quote(path, safe=SUB_DELIMS + ":/[]@") + # For 'query' we need to drop '#' from the GEN_DELIMS set. + # We also exclude '/' because it is more robust to replace it with a percent + # encoding despite it not being a requirement of the spec. + parsed_query: typing.Optional[str] = ( + None if query is None else quote(query, safe=SUB_DELIMS + ":?[]@") + ) + # For 'fragment' we can include all of the GEN_DELIMS set. + parsed_fragment: typing.Optional[str] = ( + None if fragment is None else quote(fragment, safe=SUB_DELIMS + ":/?#[]@") + ) + + # The parsed ASCII bytestrings are our canonical form. + # All properties of the URL are derived from these. + return ParseResult( + parsed_scheme, + parsed_userinfo, + parsed_host, + parsed_port, + parsed_path, + parsed_query, + parsed_fragment, + ) + + +def encode_host(host: str) -> str: + if not host: + return "" + + elif IPv4_STYLE_HOSTNAME.match(host): + # Validate IPv4 hostnames like #.#.#.# + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet + try: + ipaddress.IPv4Address(host) + except ipaddress.AddressValueError: + raise InvalidURL(f"Invalid IPv4 address: {host!r}") + return host + + elif IPv6_STYLE_HOSTNAME.match(host): + # Validate IPv6 hostnames like [...] + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # "A host identified by an Internet Protocol literal address, version 6 + # [RFC3513] or later, is distinguished by enclosing the IP literal + # within square brackets ("[" and "]"). This is the only place where + # square bracket characters are allowed in the URI syntax." + try: + ipaddress.IPv6Address(host[1:-1]) + except ipaddress.AddressValueError: + raise InvalidURL(f"Invalid IPv6 address: {host!r}") + return host[1:-1] + + elif host.isascii(): + # Regular ASCII hostnames + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # reg-name = *( unreserved / pct-encoded / sub-delims ) + return quote(host.lower(), safe=SUB_DELIMS) + + # IDNA hostnames + try: + return idna.encode(host.lower()).decode("ascii") + except idna.IDNAError: + raise InvalidURL(f"Invalid IDNA hostname: {host!r}") + + +def normalize_port( + port: typing.Optional[typing.Union[str, int]], scheme: str +) -> typing.Optional[int]: + # From https://tools.ietf.org/html/rfc3986#section-3.2.3 + # + # "A scheme may define a default port. For example, the "http" scheme + # defines a default port of "80", corresponding to its reserved TCP + # port number. The type of port designated by the port number (e.g., + # TCP, UDP, SCTP) is defined by the URI scheme. URI producers and + # normalizers should omit the port component and its ":" delimiter if + # port is empty or if its value would be the same as that of the + # scheme's default." 
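+    #
+    # For example:
+    #
+    #   normalize_port("443", "https")  -> None    (default port is omitted)
+    #   normalize_port("8080", "https") -> 8080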
+    if port is None or port == "":
+        return None
+
+    try:
+        port_as_int = int(port)
+    except ValueError:
+        raise InvalidURL(f"Invalid port: {port!r}")
+
+    # See https://url.spec.whatwg.org/#url-miscellaneous
+    default_port = {"ftp": 21, "http": 80, "https": 443, "ws": 80, "wss": 443}.get(
+        scheme
+    )
+    if port_as_int == default_port:
+        return None
+    return port_as_int
+
+
+def validate_path(path: str, has_scheme: bool, has_authority: bool) -> None:
+    """
+    Path validation rules that depend on whether the URL contains a scheme or authority component.
+
+    See https://datatracker.ietf.org/doc/html/rfc3986.html#section-3.3
+    """
+    if has_authority:
+        # > If a URI contains an authority component, then the path component
+        # > must either be empty or begin with a slash ("/") character.
+        if path and not path.startswith("/"):
+            raise InvalidURL("For absolute URLs, path must be empty or begin with '/'")
+    else:
+        # > If a URI does not contain an authority component, then the path cannot begin
+        # > with two slash characters ("//").
+        if path.startswith("//"):
+            raise InvalidURL(
+                "URLs with no authority component cannot have a path starting with '//'"
+            )
+        # > In addition, a URI reference (Section 4.1) may be a relative-path reference, in which
+        # > case the first path segment cannot contain a colon (":") character.
+        if path.startswith(":") and not has_scheme:
+            raise InvalidURL(
+                "URLs with no scheme component cannot have a path starting with ':'"
+            )
+
+
+def normalize_path(path: str) -> str:
+    """
+    Drop "." and ".." segments from a URL path.
+
+    For example:
+
+        normalize_path("/path/./to/somewhere/..") == "/path/to"
+    """
+    # https://datatracker.ietf.org/doc/html/rfc3986#section-5.2.4
+    components = path.split("/")
+    output: typing.List[str] = []
+    for component in components:
+        if component == ".":
+            pass
+        elif component == "..":
+            if output and output != [""]:
+                output.pop()
+        else:
+            output.append(component)
+    return "/".join(output)
+
+
+def percent_encode(char: str) -> str:
+    """
+    Replace a single character with the percent-encoded representation.
+
+    Characters outside the ASCII range are represented with the percent-encoded
+    form of their UTF-8 byte sequence.
+
+    For example:
+
+        percent_encode(" ") == "%20"
+    """
+    return "".join([f"%{byte:02x}" for byte in char.encode("utf-8")]).upper()
+
+
+def is_safe(string: str, safe: str = "/") -> bool:
+    """
+    Determine if a given string is already quote-safe.
+    """
+    NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe + "%"
+
+    # All characters must already be non-escaping or '%'
+    for char in string:
+        if char not in NON_ESCAPED_CHARS:
+            return False
+
+    # Any '%' characters must be valid '%xx' escape sequences.
+    return string.count("%") == len(PERCENT_ENCODED_REGEX.findall(string))
+
+
+def quote(string: str, safe: str = "/") -> str:
+    """
+    Use percent-encoding to quote a string if required.
+    """
+    if is_safe(string, safe=safe):
+        return string
+
+    NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe
+    return "".join(
+        [char if char in NON_ESCAPED_CHARS else percent_encode(char) for char in string]
+    )
+
+
+def urlencode(items: typing.List[typing.Tuple[str, str]]) -> str:
+    # We can use a much simpler version of the stdlib urlencode here because
+    # we don't need to handle a bunch of different typing cases, such as bytes vs str.
+    #
+    # https://github.com/python/cpython/blob/b2f7b2ef0b5421e01efb8c7bee2ef95d3bab77eb/Lib/urllib/parse.py#L926
+    #
+    # Note that we use '%20' encoding for spaces, and '%2F' for '/'.
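+    # For instance:
+    #
+    #   urlencode([("a b", "c/d")]) == "a%20b=c%2Fd"
+    #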
+    # This is slightly different than `requests`, but is the behaviour that browsers use.
+    #
+    # See
+    # - https://github.com/encode/httpx/issues/2536
+    # - https://github.com/encode/httpx/issues/2721
+    # - https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode
+    return "&".join([quote(k, safe="") + "=" + quote(v, safe="") for k, v in items])
diff --git a/packages/httpx/_urls.py b/packages/httpx/_urls.py
new file mode 100644
index 000000000..b023941b6
--- /dev/null
+++ b/packages/httpx/_urls.py
@@ -0,0 +1,642 @@
+import typing
+from urllib.parse import parse_qs, unquote
+
+import idna
+
+from ._types import QueryParamTypes, RawURL, URLTypes
+from ._urlparse import urlencode, urlparse
+from ._utils import primitive_value_to_str
+
+
+class URL:
+    """
+    url = httpx.URL("HTTPS://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink")
+
+    assert url.scheme == "https"
+    assert url.username == "jo@email.com"
+    assert url.password == "a secret"
+    assert url.userinfo == b"jo%40email.com:a%20secret"
+    assert url.host == "müller.de"
+    assert url.raw_host == b"xn--mller-kva.de"
+    assert url.port == 1234
+    assert url.netloc == b"xn--mller-kva.de:1234"
+    assert url.path == "/pa th"
+    assert url.query == b"search=ab"
+    assert url.raw_path == b"/pa%20th?search=ab"
+    assert url.fragment == "anchorlink"
+
+    The components of a URL are broken down like this:
+
+       https://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink
+    [scheme]   [  username  ] [password] [ host ][port][ path ] [ query ] [fragment]
+            [       userinfo        ] [   netloc   ][    raw_path    ]
+
+    Note that:
+
+    * `url.scheme` is normalized to always be lowercased.
+
+    * `url.host` is normalized to always be lowercased. Internationalized domain
+      names are represented in unicode, without IDNA encoding applied. For instance:
+
+      url = httpx.URL("http://中国.icom.museum")
+      assert url.host == "中国.icom.museum"
+      url = httpx.URL("http://xn--fiqs8s.icom.museum")
+      assert url.host == "中国.icom.museum"
+
+    * `url.raw_host` is normalized to always be lowercased, and is IDNA encoded.
+
+      url = httpx.URL("http://中国.icom.museum")
+      assert url.raw_host == b"xn--fiqs8s.icom.museum"
+      url = httpx.URL("http://xn--fiqs8s.icom.museum")
+      assert url.raw_host == b"xn--fiqs8s.icom.museum"
+
+    * `url.port` is either None or an integer. URLs that include the default port for
+      "http", "https", "ws", "wss", and "ftp" schemes have their port normalized to `None`.
+
+      assert httpx.URL("http://example.com") == httpx.URL("http://example.com:80")
+      assert httpx.URL("http://example.com").port is None
+      assert httpx.URL("http://example.com:80").port is None
+
+    * `url.userinfo` is raw bytes, without URL escaping. Usually you'll want to work with
+      `url.username` and `url.password` instead, which handle the URL escaping.
+
+    * `url.raw_path` is raw bytes of both the path and query, without URL escaping.
+      This portion is used as the target when constructing HTTP requests. Usually you'll
+      want to work with `url.path` instead.
+
+    * `url.query` is raw bytes, without URL escaping. A URL query string portion can only
+      be properly URL escaped when decoding the parameter names and values themselves.
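+
+    * For a small illustrative sketch of that decoding, the `params` property
+      defined below parses the query string into a `QueryParams` multi-dict:
+
+      url = httpx.URL("https://example.com/?a=1&a=2")
+      assert url.params.get_list("a") == ["1", "2"]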
+ """ + + def __init__( + self, url: typing.Union["URL", str] = "", **kwargs: typing.Any + ) -> None: + if kwargs: + allowed = { + "scheme": str, + "username": str, + "password": str, + "userinfo": bytes, + "host": str, + "port": int, + "netloc": bytes, + "path": str, + "query": bytes, + "raw_path": bytes, + "fragment": str, + "params": object, + } + + # Perform type checking for all supported keyword arguments. + for key, value in kwargs.items(): + if key not in allowed: + message = f"{key!r} is an invalid keyword argument for URL()" + raise TypeError(message) + if value is not None and not isinstance(value, allowed[key]): + expected = allowed[key].__name__ + seen = type(value).__name__ + message = f"Argument {key!r} must be {expected} but got {seen}" + raise TypeError(message) + if isinstance(value, bytes): + kwargs[key] = value.decode("ascii") + + if "params" in kwargs: + # Replace any "params" keyword with the raw "query" instead. + # + # Ensure that empty params use `kwargs["query"] = None` rather + # than `kwargs["query"] = ""`, so that generated URLs do not + # include an empty trailing "?". + params = kwargs.pop("params") + kwargs["query"] = None if not params else str(QueryParams(params)) + + if isinstance(url, str): + self._uri_reference = urlparse(url, **kwargs) + elif isinstance(url, URL): + self._uri_reference = url._uri_reference.copy_with(**kwargs) + else: + raise TypeError( + f"Invalid type for url. Expected str or httpx.URL, got {type(url)}: {url!r}" + ) + + @property + def scheme(self) -> str: + """ + The URL scheme, such as "http", "https". + Always normalised to lowercase. + """ + return self._uri_reference.scheme + + @property + def raw_scheme(self) -> bytes: + """ + The raw bytes representation of the URL scheme, such as b"http", b"https". + Always normalised to lowercase. + """ + return self._uri_reference.scheme.encode("ascii") + + @property + def userinfo(self) -> bytes: + """ + The URL userinfo as a raw bytestring. + For example: b"jo%40email.com:a%20secret". + """ + return self._uri_reference.userinfo.encode("ascii") + + @property + def username(self) -> str: + """ + The URL username as a string, with URL decoding applied. + For example: "jo@email.com" + """ + userinfo = self._uri_reference.userinfo + return unquote(userinfo.partition(":")[0]) + + @property + def password(self) -> str: + """ + The URL password as a string, with URL decoding applied. + For example: "a secret" + """ + userinfo = self._uri_reference.userinfo + return unquote(userinfo.partition(":")[2]) + + @property + def host(self) -> str: + """ + The URL host as a string. + Always normalized to lowercase, with IDNA hosts decoded into unicode. + + Examples: + + url = httpx.URL("http://www.EXAMPLE.org") + assert url.host == "www.example.org" + + url = httpx.URL("http://中国.icom.museum") + assert url.host == "中国.icom.museum" + + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.host == "中国.icom.museum" + + url = httpx.URL("https://[::ffff:192.168.0.1]") + assert url.host == "::ffff:192.168.0.1" + """ + host: str = self._uri_reference.host + + if host.startswith("xn--"): + host = idna.decode(host) + + return host + + @property + def raw_host(self) -> bytes: + """ + The raw bytes representation of the URL host. + Always normalized to lowercase, and IDNA encoded. 
+
+        Examples:
+
+        url = httpx.URL("http://www.EXAMPLE.org")
+        assert url.raw_host == b"www.example.org"
+
+        url = httpx.URL("http://中国.icom.museum")
+        assert url.raw_host == b"xn--fiqs8s.icom.museum"
+
+        url = httpx.URL("http://xn--fiqs8s.icom.museum")
+        assert url.raw_host == b"xn--fiqs8s.icom.museum"
+
+        url = httpx.URL("https://[::ffff:192.168.0.1]")
+        assert url.raw_host == b"::ffff:192.168.0.1"
+        """
+        return self._uri_reference.host.encode("ascii")
+
+    @property
+    def port(self) -> typing.Optional[int]:
+        """
+        The URL port as an integer.
+
+        Note that the URL class performs port normalization as per the WHATWG spec.
+        Default ports for "http", "https", "ws", "wss", and "ftp" schemes are always
+        treated as `None`.
+
+        For example:
+
+        assert httpx.URL("http://www.example.com") == httpx.URL("http://www.example.com:80")
+        assert httpx.URL("http://www.example.com:80").port is None
+        """
+        return self._uri_reference.port
+
+    @property
+    def netloc(self) -> bytes:
+        """
+        Either `<host>` or `<host>:<port>` as bytes.
+        Always normalized to lowercase, and IDNA encoded.
+
+        This property may be used for generating the value of a request
+        "Host" header.
+        """
+        return self._uri_reference.netloc.encode("ascii")
+
+    @property
+    def path(self) -> str:
+        """
+        The URL path as a string. Excluding the query string, and URL decoded.
+
+        For example:
+
+        url = httpx.URL("https://example.com/pa%20th")
+        assert url.path == "/pa th"
+        """
+        path = self._uri_reference.path or "/"
+        return unquote(path)
+
+    @property
+    def query(self) -> bytes:
+        """
+        The URL query string, as raw bytes, excluding the leading b"?".
+
+        This is necessarily a bytewise interface, because we cannot
+        perform URL decoding of this representation until we've parsed
+        the keys and values into a QueryParams instance.
+
+        For example:
+
+        url = httpx.URL("https://example.com/?filter=some%20search%20terms")
+        assert url.query == b"filter=some%20search%20terms"
+        """
+        query = self._uri_reference.query or ""
+        return query.encode("ascii")
+
+    @property
+    def params(self) -> "QueryParams":
+        """
+        The URL query parameters, neatly parsed and packaged into an immutable
+        multidict representation.
+        """
+        return QueryParams(self._uri_reference.query)
+
+    @property
+    def raw_path(self) -> bytes:
+        """
+        The complete URL path and query string as raw bytes.
+        Used as the target when constructing HTTP requests.
+
+        For example:
+
+        GET /users?search=some%20text HTTP/1.1
+        Host: www.example.org
+        Connection: close
+        """
+        path = self._uri_reference.path or "/"
+        if self._uri_reference.query is not None:
+            path += "?" + self._uri_reference.query
+        return path.encode("ascii")
+
+    @property
+    def fragment(self) -> str:
+        """
+        The URL fragment, as used in HTML anchors.
+        As a string, without the leading '#'.
+        """
+        return unquote(self._uri_reference.fragment or "")
+
+    @property
+    def raw(self) -> RawURL:
+        """
+        Provides the (scheme, host, port, target) for the outgoing request.
+
+        In older versions of `httpx` this was used in the low-level transport API.
+        We no longer use `RawURL`, and this property will be deprecated in a future release.
+        """
+        return RawURL(
+            self.raw_scheme,
+            self.raw_host,
+            self.port,
+            self.raw_path,
+        )
+
+    @property
+    def is_absolute_url(self) -> bool:
+        """
+        Return `True` for absolute URLs such as 'http://example.com/path',
+        and `False` for relative URLs such as '/path'.
+        """
+        # We don't use `.is_absolute` from `rfc3986` because it treats
+        # URLs with a fragment portion as not absolute.
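+        # (For example, "http://example.com/path#anchor" still counts as
+        # absolute here, while "//example.com/path" does not, since it
+        # provides no scheme.)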
+        # What we actually care about is if the URL provides
+        # a scheme and hostname to which connections should be made.
+        return bool(self._uri_reference.scheme and self._uri_reference.host)
+
+    @property
+    def is_relative_url(self) -> bool:
+        """
+        Return `False` for absolute URLs such as 'http://example.com/path',
+        and `True` for relative URLs such as '/path'.
+        """
+        return not self.is_absolute_url
+
+    def copy_with(self, **kwargs: typing.Any) -> "URL":
+        """
+        Copy this URL, returning a new URL with some components altered.
+        Accepts the same set of parameters as the components that are made
+        available via properties on the `URL` class.
+
+        For example:
+
+        url = httpx.URL("https://www.example.com").copy_with(username="jo@gmail.com", password="a secret")
+        assert url == "https://jo%40gmail.com:a%20secret@www.example.com"
+        """
+        return URL(self, **kwargs)
+
+    def copy_set_param(self, key: str, value: typing.Any = None) -> "URL":
+        return self.copy_with(params=self.params.set(key, value))
+
+    def copy_add_param(self, key: str, value: typing.Any = None) -> "URL":
+        return self.copy_with(params=self.params.add(key, value))
+
+    def copy_remove_param(self, key: str) -> "URL":
+        return self.copy_with(params=self.params.remove(key))
+
+    def copy_merge_params(self, params: QueryParamTypes) -> "URL":
+        return self.copy_with(params=self.params.merge(params))
+
+    def join(self, url: URLTypes) -> "URL":
+        """
+        Return an absolute URL, using this URL as the base.
+
+        For example:
+
+        url = httpx.URL("https://www.example.com/test")
+        url = url.join("/new/path")
+        assert url == "https://www.example.com/new/path"
+        """
+        from urllib.parse import urljoin
+
+        return URL(urljoin(str(self), str(URL(url))))
+
+    def __hash__(self) -> int:
+        return hash(str(self))
+
+    def __eq__(self, other: typing.Any) -> bool:
+        return isinstance(other, (URL, str)) and str(self) == str(URL(other))
+
+    def __str__(self) -> str:
+        return str(self._uri_reference)
+
+    def __repr__(self) -> str:
+        scheme, userinfo, host, port, path, query, fragment = self._uri_reference
+
+        if ":" in userinfo:
+            # Mask any password component.
+            userinfo = f'{userinfo.split(":")[0]}:[secure]'
+
+        authority = "".join(
+            [
+                f"{userinfo}@" if userinfo else "",
+                f"[{host}]" if ":" in host else host,
+                f":{port}" if port is not None else "",
+            ]
+        )
+        url = "".join(
+            [
+                f"{self.scheme}:" if scheme else "",
+                f"//{authority}" if authority else "",
+                path,
+                f"?{query}" if query is not None else "",
+                f"#{fragment}" if fragment is not None else "",
+            ]
+        )
+
+        return f"{self.__class__.__name__}({url!r})"
+
+
+class QueryParams(typing.Mapping[str, str]):
+    """
+    URL query parameters, as a multi-dict.
+    """
+
+    def __init__(
+        self, *args: typing.Optional[QueryParamTypes], **kwargs: typing.Any
+    ) -> None:
+        assert len(args) < 2, "Too many arguments."
+        assert not (args and kwargs), "Cannot mix named and unnamed arguments."
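+
+        # A sketch of the accepted input forms, mirroring the branches below:
+        #
+        #     QueryParams("a=123&a=456&b=789")                # query string
+        #     QueryParams({"a": "123", "b": ["456", "789"]})  # dict of values/lists
+        #     QueryParams([("a", "123"), ("a", "456")])       # list of two-tuples
+        #     QueryParams(a="123", b="456")                   # keyword arguments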
+ + value = args[0] if args else kwargs + + if value is None or isinstance(value, (str, bytes)): + value = value.decode("ascii") if isinstance(value, bytes) else value + self._dict = parse_qs(value, keep_blank_values=True) + elif isinstance(value, QueryParams): + self._dict = {k: list(v) for k, v in value._dict.items()} + else: + dict_value: typing.Dict[typing.Any, typing.List[typing.Any]] = {} + if isinstance(value, (list, tuple)): + # Convert list inputs like: + # [("a", "123"), ("a", "456"), ("b", "789")] + # To a dict representation, like: + # {"a": ["123", "456"], "b": ["789"]} + for item in value: + dict_value.setdefault(item[0], []).append(item[1]) + else: + # Convert dict inputs like: + # {"a": "123", "b": ["456", "789"]} + # To dict inputs where values are always lists, like: + # {"a": ["123"], "b": ["456", "789"]} + dict_value = { + k: list(v) if isinstance(v, (list, tuple)) else [v] + for k, v in value.items() + } + + # Ensure that keys and values are neatly coerced to strings. + # We coerce values `True` and `False` to JSON-like "true" and "false" + # representations, and coerce `None` values to the empty string. + self._dict = { + str(k): [primitive_value_to_str(item) for item in v] + for k, v in dict_value.items() + } + + def keys(self) -> typing.KeysView[str]: + """ + Return all the keys in the query params. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.keys()) == ["a", "b"] + """ + return self._dict.keys() + + def values(self) -> typing.ValuesView[str]: + """ + Return all the values in the query params. If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.values()) == ["123", "789"] + """ + return {k: v[0] for k, v in self._dict.items()}.values() + + def items(self) -> typing.ItemsView[str, str]: + """ + Return all items in the query params. If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.items()) == [("a", "123"), ("b", "789")] + """ + return {k: v[0] for k, v in self._dict.items()}.items() + + def multi_items(self) -> typing.List[typing.Tuple[str, str]]: + """ + Return all items in the query params. Allow duplicate keys to occur. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.multi_items()) == [("a", "123"), ("a", "456"), ("b", "789")] + """ + multi_items: typing.List[typing.Tuple[str, str]] = [] + for k, v in self._dict.items(): + multi_items.extend([(k, i) for i in v]) + return multi_items + + def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any: + """ + Get a value from the query param for a given key. If the key occurs + more than once, then only the first value is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get("a") == "123" + """ + if key in self._dict: + return self._dict[str(key)][0] + return default + + def get_list(self, key: str) -> typing.List[str]: + """ + Get all values from the query param for a given key. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get_list("a") == ["123", "456"] + """ + return list(self._dict.get(str(key), [])) + + def set(self, key: str, value: typing.Any = None) -> "QueryParams": + """ + Return a new QueryParams instance, setting the value of a key. 
+
+        Usage:
+
+        q = httpx.QueryParams("a=123")
+        q = q.set("a", "456")
+        assert q == httpx.QueryParams("a=456")
+        """
+        q = QueryParams()
+        q._dict = dict(self._dict)
+        q._dict[str(key)] = [primitive_value_to_str(value)]
+        return q
+
+    def add(self, key: str, value: typing.Any = None) -> "QueryParams":
+        """
+        Return a new QueryParams instance, setting or appending the value of a key.
+
+        Usage:
+
+        q = httpx.QueryParams("a=123")
+        q = q.add("a", "456")
+        assert q == httpx.QueryParams("a=123&a=456")
+        """
+        q = QueryParams()
+        q._dict = dict(self._dict)
+        q._dict[str(key)] = q.get_list(key) + [primitive_value_to_str(value)]
+        return q
+
+    def remove(self, key: str) -> "QueryParams":
+        """
+        Return a new QueryParams instance, removing the value of a key.
+
+        Usage:
+
+        q = httpx.QueryParams("a=123")
+        q = q.remove("a")
+        assert q == httpx.QueryParams("")
+        """
+        q = QueryParams()
+        q._dict = dict(self._dict)
+        q._dict.pop(str(key), None)
+        return q
+
+    def merge(self, params: typing.Optional[QueryParamTypes] = None) -> "QueryParams":
+        """
+        Return a new QueryParams instance, updated with the given params.
+
+        Usage:
+
+        q = httpx.QueryParams("a=123")
+        q = q.merge({"b": "456"})
+        assert q == httpx.QueryParams("a=123&b=456")
+
+        q = httpx.QueryParams("a=123")
+        q = q.merge({"a": "456", "b": "789"})
+        assert q == httpx.QueryParams("a=456&b=789")
+        """
+        q = QueryParams(params)
+        q._dict = {**self._dict, **q._dict}
+        return q
+
+    def __getitem__(self, key: typing.Any) -> str:
+        return self._dict[key][0]
+
+    def __contains__(self, key: typing.Any) -> bool:
+        return key in self._dict
+
+    def __iter__(self) -> typing.Iterator[typing.Any]:
+        return iter(self.keys())
+
+    def __len__(self) -> int:
+        return len(self._dict)
+
+    def __bool__(self) -> bool:
+        return bool(self._dict)
+
+    def __hash__(self) -> int:
+        return hash(str(self))
+
+    def __eq__(self, other: typing.Any) -> bool:
+        if not isinstance(other, self.__class__):
+            return False
+        return sorted(self.multi_items()) == sorted(other.multi_items())
+
+    def __str__(self) -> str:
+        """
+        Note that we use '%20' encoding for spaces, and '%2F' encoding for '/',
+        matching the `urlencode` helper above.
+
+        See https://github.com/encode/httpx/issues/2536 and
+        https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode
+        """
+        return urlencode(self.multi_items())
+
+    def __repr__(self) -> str:
+        class_name = self.__class__.__name__
+        query_string = str(self)
+        return f"{class_name}({query_string!r})"
+
+    def update(self, params: typing.Optional[QueryParamTypes] = None) -> None:
+        raise RuntimeError(
+            "QueryParams are immutable since 0.18.0. "
+            "Use `q = q.merge(...)` to create an updated copy."
+        )
+
+    def __setitem__(self, key: str, value: str) -> None:
+        raise RuntimeError(
+            "QueryParams are immutable since 0.18.0. "
+            "Use `q = q.set(key, value)` to create an updated copy."
+ ) diff --git a/packages/httpx/_utils.py b/packages/httpx/_utils.py index 30ab2ed5a..1775b1a1e 100644 --- a/packages/httpx/_utils.py +++ b/packages/httpx/_utils.py @@ -1,10 +1,9 @@ import codecs -import logging +import email.message +import ipaddress import mimetypes -import netrc import os import re -import sys import time import typing from pathlib import Path @@ -15,12 +14,12 @@ from ._types import PrimitiveData if typing.TYPE_CHECKING: # pragma: no cover - from ._models import URL + from ._urls import URL _HTML5_FORM_ENCODING_REPLACEMENTS = {'"': "%22", "\\": "\\\\"} _HTML5_FORM_ENCODING_REPLACEMENTS.update( - {chr(c): "%{:02X}".format(c) for c in range(0x00, 0x1F + 1) if c != 0x1B} + {chr(c): "%{:02X}".format(c) for c in range(0x1F + 1) if c != 0x1B} ) _HTML5_FORM_ENCODING_RE = re.compile( r"|".join([re.escape(c) for c in _HTML5_FORM_ENCODING_REPLACEMENTS.keys()]) @@ -30,7 +29,7 @@ def normalize_header_key( value: typing.Union[str, bytes], lower: bool, - encoding: str = None, + encoding: typing.Optional[str] = None, ) -> bytes: """ Coerce str/bytes into a strictly byte-wise HTTP header key. @@ -44,7 +43,7 @@ def normalize_header_key( def normalize_header_value( - value: typing.Union[str, bytes], encoding: str = None + value: typing.Union[str, bytes], encoding: typing.Optional[str] = None ) -> bytes: """ Coerce str/bytes into a strictly byte-wise HTTP header value. @@ -80,12 +79,10 @@ def is_known_encoding(encoding: str) -> bool: return True -def format_form_param(name: str, value: typing.Union[str, bytes]) -> bytes: +def format_form_param(name: str, value: str) -> bytes: """ Encode a name/value pair within a multipart form. """ - if isinstance(value, bytes): - value = value.decode() def replacer(match: typing.Match[str]) -> str: return _HTML5_FORM_ENCODING_REPLACEMENTS[match.group(0)] @@ -129,37 +126,6 @@ def guess_json_utf(data: bytes) -> typing.Optional[str]: return None -class NetRCInfo: - def __init__(self, files: typing.Optional[typing.List[str]] = None) -> None: - if files is None: - files = [os.getenv("NETRC", ""), "~/.netrc", "~/_netrc"] - self.netrc_files = files - - @property - def netrc_info(self) -> typing.Optional[netrc.netrc]: - if not hasattr(self, "_netrc_info"): - self._netrc_info = None - for file_path in self.netrc_files: - expanded_path = Path(file_path).expanduser() - try: - if expanded_path.is_file(): - self._netrc_info = netrc.netrc(str(expanded_path)) - break - except (netrc.NetrcParseError, IOError): # pragma: nocover - # Issue while reading the netrc file, ignore... - pass - return self._netrc_info - - def get_credentials(self, host: str) -> typing.Optional[typing.Tuple[str, str]]: - if self.netrc_info is None: - return None - - auth_info = self.netrc_info.authenticators(host) - if auth_info is None or auth_info[2] is None: - return None - return (auth_info[0], auth_info[2]) - - def get_ca_bundle_from_env() -> typing.Optional[str]: if "SSL_CERT_FILE" in os.environ: ssl_file = Path(os.environ["SSL_CERT_FILE"]) @@ -209,6 +175,14 @@ def parse_header_links(value: str) -> typing.List[typing.Dict[str, str]]: return links +def parse_content_type_charset(content_type: str) -> typing.Optional[str]: + # We used to use `cgi.parse_header()` here, but `cgi` became a dead battery. 
+ # See: https://peps.python.org/pep-0594/#cgi + msg = email.message.Message() + msg["content-type"] = content_type + return msg.get_content_charset(failobj=None) + + SENSITIVE_HEADERS = {"authorization", "proxy-authorization"} @@ -221,50 +195,6 @@ def obfuscate_sensitive_headers( yield k, v -_LOGGER_INITIALIZED = False -TRACE_LOG_LEVEL = 5 - - -class Logger(logging.Logger): - # Stub for type checkers. - def trace(self, message: str, *args: typing.Any, **kwargs: typing.Any) -> None: - ... # pragma: nocover - - -def get_logger(name: str) -> Logger: - """ - Get a `logging.Logger` instance, and optionally - set up debug logging based on the HTTPX_LOG_LEVEL environment variable. - """ - global _LOGGER_INITIALIZED - - if not _LOGGER_INITIALIZED: - _LOGGER_INITIALIZED = True - logging.addLevelName(TRACE_LOG_LEVEL, "TRACE") - - log_level = os.environ.get("HTTPX_LOG_LEVEL", "").upper() - if log_level in ("DEBUG", "TRACE"): - logger = logging.getLogger("httpx") - logger.setLevel(logging.DEBUG if log_level == "DEBUG" else TRACE_LOG_LEVEL) - handler = logging.StreamHandler(sys.stderr) - handler.setFormatter( - logging.Formatter( - fmt="%(levelname)s [%(asctime)s] %(name)s - %(message)s", - datefmt="%Y-%m-%d %H:%M:%S", - ) - ) - logger.addHandler(handler) - - logger = logging.getLogger(name) - - def trace(message: str, *args: typing.Any, **kwargs: typing.Any) -> None: - logger.log(TRACE_LOG_LEVEL, message, *args, **kwargs) - - logger.trace = trace # type: ignore - - return typing.cast(Logger, logger) - - def port_or_default(url: "URL") -> typing.Optional[int]: if url.port is not None: return url.port @@ -282,6 +212,21 @@ def same_origin(url: "URL", other: "URL") -> bool: ) +def is_https_redirect(url: "URL", location: "URL") -> bool: + """ + Return 'True' if 'location' is a HTTPS upgrade of 'url' + """ + if url.host != location.host: + return False + + return ( + url.scheme == "http" + and port_or_default(url) == 80 + and location.scheme == "https" + and port_or_default(location) == 443 + ) + + def get_environment_proxies() -> typing.Dict[str, typing.Optional[str]]: """Gets proxy information from the environment""" @@ -305,7 +250,7 @@ def get_environment_proxies() -> typing.Dict[str, typing.Optional[str]]: # on how names in `NO_PROXY` are handled. if hostname == "*": # If NO_PROXY=* is used or if "*" occurs as any one of the comma - # seperated hostnames, then we should just bypass any information + # separated hostnames, then we should just bypass any information # from HTTP_PROXY, HTTPS_PROXY, ALL_PROXY, and always ignore # proxies. return {} @@ -315,7 +260,16 @@ def get_environment_proxies() -> typing.Dict[str, typing.Optional[str]]: # NO_PROXY=google.com is marked as "all://*google.com, # which disables "www.google.com" and "google.com". 
 # (But not "wwwgoogle.com")
-            mounts[f"all://*{hostname}"] = None
+            # NO_PROXY can include domains, IPv6, IPv4 addresses and "localhost"
+            # NO_PROXY=example.com,::1,localhost,192.168.0.0/16
+            if is_ipv4_hostname(hostname):
+                mounts[f"all://{hostname}"] = None
+            elif is_ipv6_hostname(hostname):
+                mounts[f"all://[{hostname}]"] = None
+            elif hostname.lower() == "localhost":
+                mounts[f"all://{hostname}"] = None
+            else:
+                mounts[f"all://*{hostname}"] = None
 
     return mounts
 
@@ -374,10 +328,10 @@ async def _get_time(self) -> float:
         import trio
 
         return trio.current_time()
-    elif library == "curio":  # pragma: nocover
+    elif library == "curio":  # pragma: no cover
         import curio
 
-        return await curio.clock()
+        return typing.cast(float, await curio.clock())
 
     import asyncio
 
@@ -403,12 +357,12 @@ class URLPattern:
     A utility class currently used for making lookups against proxy keys...
 
     # Wildcard matching...
-    >>> pattern = URLPattern("all")
+    >>> pattern = URLPattern("all://")
     >>> pattern.matches(httpx.URL("http://example.com"))
     True
 
     # With scheme matching...
-    >>> pattern = URLPattern("https")
+    >>> pattern = URLPattern("https://")
     >>> pattern.matches(httpx.URL("https://example.com"))
     True
     >>> pattern.matches(httpx.URL("http://example.com"))
     False
@@ -441,7 +395,7 @@ class URLPattern:
     """
 
     def __init__(self, pattern: str) -> None:
-        from ._models import URL
+        from ._urls import URL
 
         if pattern and ":" not in pattern:
             raise ValueError(
@@ -457,19 +411,18 @@ def __init__(self, pattern: str) -> None:
         self.port = url.port
         if not url.host or url.host == "*":
             self.host_regex: typing.Optional[typing.Pattern[str]] = None
+        elif url.host.startswith("*."):
+            # *.example.com should match "www.example.com", but not "example.com"
+            domain = re.escape(url.host[2:])
+            self.host_regex = re.compile(f"^.+\\.{domain}$")
+        elif url.host.startswith("*"):
+            # *example.com should match "www.example.com" and "example.com"
+            domain = re.escape(url.host[1:])
+            self.host_regex = re.compile(f"^(.+\\.)?{domain}$")
         else:
-            if url.host.startswith("*."):
-                # *.example.com should match "www.example.com", but not "example.com"
-                domain = re.escape(url.host[2:])
-                self.host_regex = re.compile(f"^.+\\.{domain}$")
-            elif url.host.startswith("*"):
-                # *example.com should match "www.example.com" and "example.com"
-                domain = re.escape(url.host[1:])
-                self.host_regex = re.compile(f"^(.+\\.)?{domain}$")
-            else:
-                # example.com should match "example.com" but not "www.example.com"
-                domain = re.escape(url.host)
-                self.host_regex = re.compile(f"^{domain}$")
+            # example.com should match "example.com" but not "www.example.com"
+            domain = re.escape(url.host)
+            self.host_regex = re.compile(f"^{domain}$")
 
     def matches(self, other: "URL") -> bool:
         if self.scheme and self.scheme != other.scheme:
@@ -485,7 +438,7 @@ def matches(self, other: "URL") -> bool:
         return True
 
     @property
-    def priority(self) -> tuple:
+    def priority(self) -> typing.Tuple[int, int, int]:
         """
         The priority allows URLPattern instances to be sortable, so that
         we can match from most specific to least specific.
@@ -506,3 +459,19 @@ def __lt__(self, other: "URLPattern") -> bool: def __eq__(self, other: typing.Any) -> bool: return isinstance(other, URLPattern) and self.pattern == other.pattern + + +def is_ipv4_hostname(hostname: str) -> bool: + try: + ipaddress.IPv4Address(hostname.split("/")[0]) + except Exception: + return False + return True + + +def is_ipv6_hostname(hostname: str) -> bool: + try: + ipaddress.IPv6Address(hostname.split("/")[0]) + except Exception: + return False + return True diff --git a/packages/idna/codec.py b/packages/idna/codec.py index 080f22a3b..eaeada582 100644 --- a/packages/idna/codec.py +++ b/packages/idna/codec.py @@ -1,14 +1,13 @@ from .core import encode, decode, alabel, ulabel, IDNAError import codecs import re -from typing import Tuple, Optional +from typing import Any, Tuple, Optional _unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') class Codec(codecs.Codec): - def encode(self, data, errors='strict'): - # type: (str, str) -> Tuple[bytes, int] + def encode(self, data: str, errors: str = 'strict') -> Tuple[bytes, int]: if errors != 'strict': raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) @@ -17,8 +16,7 @@ def encode(self, data, errors='strict'): return encode(data), len(data) - def decode(self, data, errors='strict'): - # type: (bytes, str) -> Tuple[str, int] + def decode(self, data: bytes, errors: str = 'strict') -> Tuple[str, int]: if errors != 'strict': raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) @@ -28,25 +26,24 @@ def decode(self, data, errors='strict'): return decode(data), len(data) class IncrementalEncoder(codecs.BufferedIncrementalEncoder): - def _buffer_encode(self, data, errors, final): # type: ignore - # type: (str, str, bool) -> Tuple[str, int] + def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]: if errors != 'strict': raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) if not data: - return "", 0 + return b'', 0 labels = _unicode_dots_re.split(data) - trailing_dot = '' + trailing_dot = b'' if labels: if not labels[-1]: - trailing_dot = '.' + trailing_dot = b'.' del labels[-1] elif not final: # Keep potentially unfinished label until the next call del labels[-1] if labels: - trailing_dot = '.' + trailing_dot = b'.' 
result = [] size = 0 @@ -57,19 +54,21 @@ def _buffer_encode(self, data, errors, final): # type: ignore size += len(label) # Join with U+002E - result_str = '.'.join(result) + trailing_dot # type: ignore + result_bytes = b'.'.join(result) + trailing_dot size += len(trailing_dot) - return result_str, size + return result_bytes, size class IncrementalDecoder(codecs.BufferedIncrementalDecoder): - def _buffer_decode(self, data, errors, final): # type: ignore - # type: (str, str, bool) -> Tuple[str, int] + def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]: if errors != 'strict': raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) if not data: return ('', 0) + if not isinstance(data, str): + data = str(data, 'ascii') + labels = _unicode_dots_re.split(data) trailing_dot = '' if labels: @@ -103,11 +102,11 @@ class StreamReader(Codec, codecs.StreamReader): pass -def getregentry(): - # type: () -> codecs.CodecInfo - # Compatibility as a search_function for codecs.register() +def search_function(name: str) -> Optional[codecs.CodecInfo]: + if name != 'idna2008': + return None return codecs.CodecInfo( - name='idna', + name=name, encode=Codec().encode, # type: ignore decode=Codec().decode, # type: ignore incrementalencoder=IncrementalEncoder, @@ -115,3 +114,5 @@ def getregentry(): streamwriter=StreamWriter, streamreader=StreamReader, ) + +codecs.register(search_function) diff --git a/packages/idna/compat.py b/packages/idna/compat.py index dc896c766..786e6bda6 100644 --- a/packages/idna/compat.py +++ b/packages/idna/compat.py @@ -2,15 +2,12 @@ from .codec import * from typing import Any, Union -def ToASCII(label): - # type: (str) -> bytes +def ToASCII(label: str) -> bytes: return encode(label) -def ToUnicode(label): - # type: (Union[bytes, bytearray]) -> str +def ToUnicode(label: Union[bytes, bytearray]) -> str: return decode(label) -def nameprep(s): - # type: (Any) -> None +def nameprep(s: Any) -> None: raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol') diff --git a/packages/idna/core.py b/packages/idna/core.py index d6051297d..0bd89a3c2 100644 --- a/packages/idna/core.py +++ b/packages/idna/core.py @@ -29,43 +29,36 @@ class InvalidCodepointContext(IDNAError): pass -def _combining_class(cp): - # type: (int) -> int +def _combining_class(cp: int) -> int: v = unicodedata.combining(chr(cp)) if v == 0: if not unicodedata.name(chr(cp)): raise ValueError('Unknown character in unicodedata') return v -def _is_script(cp, script): - # type: (str, str) -> bool +def _is_script(cp: str, script: str) -> bool: return intranges_contain(ord(cp), idnadata.scripts[script]) -def _punycode(s): - # type: (str) -> bytes +def _punycode(s: str) -> bytes: return s.encode('punycode') -def _unot(s): - # type: (int) -> str +def _unot(s: int) -> str: return 'U+{:04X}'.format(s) -def valid_label_length(label): - # type: (Union[bytes, str]) -> bool +def valid_label_length(label: Union[bytes, str]) -> bool: if len(label) > 63: return False return True -def valid_string_length(label, trailing_dot): - # type: (Union[bytes, str], bool) -> bool +def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: if len(label) > (254 if trailing_dot else 253): return False return True -def check_bidi(label, check_ltr=False): - # type: (str, bool) -> bool +def check_bidi(label: str, check_ltr: bool = False) -> bool: # Bidi rules should only be applied if string contains RTL characters bidi_label = False for (idx, cp) in enumerate(label, 1): @@ -124,15 +117,13 @@ 
def check_bidi(label, check_ltr=False): return True -def check_initial_combiner(label): - # type: (str) -> bool +def check_initial_combiner(label: str) -> bool: if unicodedata.category(label[0])[0] == 'M': raise IDNAError('Label begins with an illegal combining character') return True -def check_hyphen_ok(label): - # type: (str) -> bool +def check_hyphen_ok(label: str) -> bool: if label[2:4] == '--': raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') if label[0] == '-' or label[-1] == '-': @@ -140,14 +131,12 @@ def check_hyphen_ok(label): return True -def check_nfc(label): - # type: (str) -> None +def check_nfc(label: str) -> None: if unicodedata.normalize('NFC', label) != label: raise IDNAError('Label must be in Normalization Form C') -def valid_contextj(label, pos): - # type: (str, int) -> bool +def valid_contextj(label: str, pos: int) -> bool: cp_value = ord(label[pos]) if cp_value == 0x200c: @@ -190,8 +179,7 @@ def valid_contextj(label, pos): return False -def valid_contexto(label, pos, exception=False): - # type: (str, int, bool) -> bool +def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: cp_value = ord(label[pos]) if cp_value == 0x00b7: @@ -233,8 +221,7 @@ def valid_contexto(label, pos, exception=False): return False -def check_label(label): - # type: (Union[str, bytes, bytearray]) -> None +def check_label(label: Union[str, bytes, bytearray]) -> None: if isinstance(label, (bytes, bytearray)): label = label.decode('utf-8') if len(label) == 0: @@ -265,8 +252,7 @@ def check_label(label): check_bidi(label) -def alabel(label): - # type: (str) -> bytes +def alabel(label: str) -> bytes: try: label_bytes = label.encode('ascii') ulabel(label_bytes) @@ -290,8 +276,7 @@ def alabel(label): return label_bytes -def ulabel(label): - # type: (Union[str, bytes, bytearray]) -> str +def ulabel(label: Union[str, bytes, bytearray]) -> str: if not isinstance(label, (bytes, bytearray)): try: label_bytes = label.encode('ascii') @@ -312,13 +297,15 @@ def ulabel(label): check_label(label_bytes) return label_bytes.decode('ascii') - label = label_bytes.decode('punycode') + try: + label = label_bytes.decode('punycode') + except UnicodeError: + raise IDNAError('Invalid A-label') check_label(label) return label -def uts46_remap(domain, std3_rules=True, transitional=False): - # type: (str, bool, bool) -> str +def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: """Re-map the characters in the string according to UTS46 processing.""" from .uts46data import uts46data output = '' @@ -350,10 +337,12 @@ def uts46_remap(domain, std3_rules=True, transitional=False): return unicodedata.normalize('NFC', output) -def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False): - # type: (Union[str, bytes, bytearray], bool, bool, bool, bool) -> bytes - if isinstance(s, (bytes, bytearray)): - s = s.decode('ascii') +def encode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False, transitional: bool = False) -> bytes: + if not isinstance(s, str): + try: + s = str(s, 'ascii') + except UnicodeDecodeError: + raise IDNAError('should pass a unicode string to the function rather than a byte string.') if uts46: s = uts46_remap(s, std3_rules, transitional) trailing_dot = False @@ -381,10 +370,12 @@ def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False): return s -def decode(s, strict=False, uts46=False, std3_rules=False): - # type: (Union[str, bytes, bytearray], bool, 
bool, bool) -> str - if isinstance(s, (bytes, bytearray)): - s = s.decode('ascii') +def decode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False) -> str: + try: + if not isinstance(s, str): + s = str(s, 'ascii') + except UnicodeDecodeError: + raise IDNAError('Invalid ASCII in A-label') if uts46: s = uts46_remap(s, std3_rules, False) trailing_dot = False diff --git a/packages/idna/idnadata.py b/packages/idna/idnadata.py index b86a3e06e..f9bc0d85c 100644 --- a/packages/idna/idnadata.py +++ b/packages/idna/idnadata.py @@ -1,6 +1,6 @@ # This file is automatically generated by tools/idna-data -__version__ = '13.0.0' +__version__ = '15.0.0' scripts = { 'Greek': ( 0x37000000374, @@ -49,17 +49,19 @@ 0x30210000302a, 0x30380000303c, 0x340000004dc0, - 0x4e0000009ffd, + 0x4e000000a000, 0xf9000000fa6e, 0xfa700000fada, + 0x16fe200016fe4, 0x16ff000016ff2, - 0x200000002a6de, - 0x2a7000002b735, + 0x200000002a6e0, + 0x2a7000002b73a, 0x2b7400002b81e, 0x2b8200002cea2, 0x2ceb00002ebe1, 0x2f8000002fa1e, 0x300000003134b, + 0x31350000323b0, ), 'Hebrew': ( 0x591000005c8, @@ -75,7 +77,8 @@ 'Hiragana': ( 0x304100003097, 0x309d000030a0, - 0x1b0010001b11f, + 0x1b0010001b120, + 0x1b1320001b133, 0x1b1500001b153, 0x1f2000001f201, ), @@ -87,7 +90,12 @@ 0x330000003358, 0xff660000ff70, 0xff710000ff9e, + 0x1aff00001aff4, + 0x1aff50001affc, + 0x1affd0001afff, 0x1b0000001b001, + 0x1b1200001b123, + 0x1b1550001b156, 0x1b1640001b168, ), } @@ -405,6 +413,39 @@ 0x868: 68, 0x869: 82, 0x86a: 82, + 0x870: 82, + 0x871: 82, + 0x872: 82, + 0x873: 82, + 0x874: 82, + 0x875: 82, + 0x876: 82, + 0x877: 82, + 0x878: 82, + 0x879: 82, + 0x87a: 82, + 0x87b: 82, + 0x87c: 82, + 0x87d: 82, + 0x87e: 82, + 0x87f: 82, + 0x880: 82, + 0x881: 82, + 0x882: 82, + 0x883: 67, + 0x884: 67, + 0x885: 67, + 0x886: 68, + 0x887: 85, + 0x888: 85, + 0x889: 68, + 0x88a: 68, + 0x88b: 68, + 0x88c: 68, + 0x88d: 68, + 0x88e: 82, + 0x890: 85, + 0x891: 85, 0x8a0: 68, 0x8a1: 68, 0x8a2: 68, @@ -426,6 +467,7 @@ 0x8b2: 82, 0x8b3: 68, 0x8b4: 68, + 0x8b5: 68, 0x8b6: 68, 0x8b7: 68, 0x8b8: 68, @@ -444,6 +486,7 @@ 0x8c5: 68, 0x8c6: 68, 0x8c7: 68, + 0x8c8: 68, 0x8e2: 85, 0x1806: 85, 0x1807: 68, @@ -768,6 +811,24 @@ 0x10f52: 68, 0x10f53: 68, 0x10f54: 82, + 0x10f70: 68, + 0x10f71: 68, + 0x10f72: 68, + 0x10f73: 68, + 0x10f74: 82, + 0x10f75: 82, + 0x10f76: 68, + 0x10f77: 68, + 0x10f78: 68, + 0x10f79: 68, + 0x10f7a: 68, + 0x10f7b: 68, + 0x10f7c: 68, + 0x10f7d: 68, + 0x10f7e: 68, + 0x10f7f: 68, + 0x10f80: 68, + 0x10f81: 68, 0x10fb0: 68, 0x10fb1: 85, 0x10fb2: 68, @@ -1168,9 +1229,9 @@ 0x8000000082e, 0x8400000085c, 0x8600000086b, - 0x8a0000008b5, - 0x8b6000008c8, - 0x8d3000008e2, + 0x87000000888, + 0x8890000088f, + 0x898000008e2, 0x8e300000958, 0x96000000964, 0x96600000970, @@ -1252,11 +1313,12 @@ 0xc0e00000c11, 0xc1200000c29, 0xc2a00000c3a, - 0xc3d00000c45, + 0xc3c00000c45, 0xc4600000c49, 0xc4a00000c4e, 0xc5500000c57, 0xc5800000c5b, + 0xc5d00000c5e, 0xc6000000c64, 0xc6600000c70, 0xc8000000c84, @@ -1269,10 +1331,10 @@ 0xcc600000cc9, 0xcca00000cce, 0xcd500000cd7, - 0xcde00000cdf, + 0xcdd00000cdf, 0xce000000ce4, 0xce600000cf0, - 0xcf100000cf3, + 0xcf100000cf4, 0xd0000000d0d, 0xd0e00000d11, 0xd1200000d45, @@ -1307,7 +1369,7 @@ 0xeb400000ebe, 0xec000000ec5, 0xec600000ec7, - 0xec800000ece, + 0xec800000ecf, 0xed000000eda, 0xede00000ee0, 0xf0000000f01, @@ -1366,9 +1428,8 @@ 0x16810000169b, 0x16a0000016eb, 0x16f1000016f9, - 0x17000000170d, - 0x170e00001715, - 0x172000001735, + 0x170000001716, + 0x171f00001735, 0x174000001754, 0x17600000176d, 
0x176e00001771, @@ -1397,8 +1458,8 @@ 0x1a9000001a9a, 0x1aa700001aa8, 0x1ab000001abe, - 0x1abf00001ac1, - 0x1b0000001b4c, + 0x1abf00001acf, + 0x1b0000001b4d, 0x1b5000001b5a, 0x1b6b00001b74, 0x1b8000001bf4, @@ -1413,8 +1474,7 @@ 0x1d4e00001d4f, 0x1d6b00001d78, 0x1d7900001d9b, - 0x1dc000001dfa, - 0x1dfb00001e00, + 0x1dc000001e00, 0x1e0100001e02, 0x1e0300001e04, 0x1e0500001e06, @@ -1563,7 +1623,7 @@ 0x1ff600001ff7, 0x214e0000214f, 0x218400002185, - 0x2c3000002c5f, + 0x2c3000002c60, 0x2c6100002c62, 0x2c6500002c67, 0x2c6800002c69, @@ -1652,8 +1712,7 @@ 0x31a0000031c0, 0x31f000003200, 0x340000004dc0, - 0x4e0000009ffd, - 0xa0000000a48d, + 0x4e000000a48d, 0xa4d00000a4fe, 0xa5000000a60d, 0xa6100000a62c, @@ -1766,9 +1825,15 @@ 0xa7bb0000a7bc, 0xa7bd0000a7be, 0xa7bf0000a7c0, + 0xa7c10000a7c2, 0xa7c30000a7c4, 0xa7c80000a7c9, 0xa7ca0000a7cb, + 0xa7d10000a7d2, + 0xa7d30000a7d4, + 0xa7d50000a7d6, + 0xa7d70000a7d8, + 0xa7d90000a7da, 0xa7f60000a7f8, 0xa7fa0000a828, 0xa82c0000a82d, @@ -1796,7 +1861,7 @@ 0xab200000ab27, 0xab280000ab2f, 0xab300000ab5b, - 0xab600000ab6a, + 0xab600000ab69, 0xabc00000abeb, 0xabec0000abee, 0xabf00000abfa, @@ -1834,9 +1899,14 @@ 0x104d8000104fc, 0x1050000010528, 0x1053000010564, + 0x10597000105a2, + 0x105a3000105b2, + 0x105b3000105ba, + 0x105bb000105bd, 0x1060000010737, 0x1074000010756, 0x1076000010768, + 0x1078000010781, 0x1080000010806, 0x1080800010809, 0x1080a00010836, @@ -1873,14 +1943,16 @@ 0x10e8000010eaa, 0x10eab00010ead, 0x10eb000010eb2, - 0x10f0000010f1d, + 0x10efd00010f1d, 0x10f2700010f28, 0x10f3000010f51, + 0x10f7000010f86, 0x10fb000010fc5, 0x10fe000010ff7, 0x1100000011047, - 0x1106600011070, + 0x1106600011076, 0x1107f000110bb, + 0x110c2000110c3, 0x110d0000110e9, 0x110f0000110fa, 0x1110000011135, @@ -1894,7 +1966,7 @@ 0x111dc000111dd, 0x1120000011212, 0x1121300011238, - 0x1123e0001123f, + 0x1123e00011242, 0x1128000011287, 0x1128800011289, 0x1128a0001128e, @@ -1934,6 +2006,7 @@ 0x117000001171b, 0x1171d0001172c, 0x117300001173a, + 0x1174000011747, 0x118000001183b, 0x118c0000118ea, 0x118ff00011907, @@ -1952,7 +2025,7 @@ 0x11a4700011a48, 0x11a5000011a9a, 0x11a9d00011a9e, - 0x11ac000011af9, + 0x11ab000011af9, 0x11c0000011c09, 0x11c0a00011c37, 0x11c3800011c41, @@ -1974,14 +2047,22 @@ 0x11d9300011d99, 0x11da000011daa, 0x11ee000011ef7, + 0x11f0000011f11, + 0x11f1200011f3b, + 0x11f3e00011f43, + 0x11f5000011f5a, 0x11fb000011fb1, 0x120000001239a, 0x1248000012544, - 0x130000001342f, + 0x12f9000012ff1, + 0x1300000013430, + 0x1344000013456, 0x1440000014647, 0x1680000016a39, 0x16a4000016a5f, 0x16a6000016a6a, + 0x16a7000016abf, + 0x16ac000016aca, 0x16ad000016aee, 0x16af000016af5, 0x16b0000016b37, @@ -1999,8 +2080,13 @@ 0x17000000187f8, 0x1880000018cd6, 0x18d0000018d09, - 0x1b0000001b11f, + 0x1aff00001aff4, + 0x1aff50001affc, + 0x1affd0001afff, + 0x1b0000001b123, + 0x1b1320001b133, 0x1b1500001b153, + 0x1b1550001b156, 0x1b1640001b168, 0x1b1700001b2fc, 0x1bc000001bc6b, @@ -2008,33 +2094,45 @@ 0x1bc800001bc89, 0x1bc900001bc9a, 0x1bc9d0001bc9f, + 0x1cf000001cf2e, + 0x1cf300001cf47, 0x1da000001da37, 0x1da3b0001da6d, 0x1da750001da76, 0x1da840001da85, 0x1da9b0001daa0, 0x1daa10001dab0, + 0x1df000001df1f, + 0x1df250001df2b, 0x1e0000001e007, 0x1e0080001e019, 0x1e01b0001e022, 0x1e0230001e025, 0x1e0260001e02b, + 0x1e0300001e06e, + 0x1e08f0001e090, 0x1e1000001e12d, 0x1e1300001e13e, 0x1e1400001e14a, 0x1e14e0001e14f, + 0x1e2900001e2af, 0x1e2c00001e2fa, + 0x1e4d00001e4fa, + 0x1e7e00001e7e7, + 0x1e7e80001e7ec, + 0x1e7ed0001e7ef, + 0x1e7f00001e7ff, 0x1e8000001e8c5, 0x1e8d00001e8d7, 0x1e9220001e94c, 
0x1e9500001e95a, - 0x1fbf00001fbfa, - 0x200000002a6de, - 0x2a7000002b735, + 0x200000002a6e0, + 0x2a7000002b73a, 0x2b7400002b81e, 0x2b8200002cea2, 0x2ceb00002ebe1, 0x300000003134b, + 0x31350000323b0, ), 'CONTEXTJ': ( 0x200c0000200e, diff --git a/packages/idna/intranges.py b/packages/idna/intranges.py index ee364a904..6a43b0475 100644 --- a/packages/idna/intranges.py +++ b/packages/idna/intranges.py @@ -8,8 +8,7 @@ import bisect from typing import List, Tuple -def intranges_from_list(list_): - # type: (List[int]) -> Tuple[int, ...] +def intranges_from_list(list_: List[int]) -> Tuple[int, ...]: """Represent a list of integers as a sequence of ranges: ((start_0, end_0), (start_1, end_1), ...), such that the original integers are exactly those x such that start_i <= x < end_i for some i. @@ -30,17 +29,14 @@ def intranges_from_list(list_): return tuple(ranges) -def _encode_range(start, end): - # type: (int, int) -> int +def _encode_range(start: int, end: int) -> int: return (start << 32) | end -def _decode_range(r): - # type: (int) -> Tuple[int, int] +def _decode_range(r: int) -> Tuple[int, int]: return (r >> 32), (r & ((1 << 32) - 1)) -def intranges_contain(int_, ranges): - # type: (int, Tuple[int, ...]) -> bool +def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool: """Determine if `int_` falls into one of the ranges in `ranges`.""" tuple_ = _encode_range(int_, 0) pos = bisect.bisect_left(ranges, tuple_) diff --git a/packages/idna/package_data.py b/packages/idna/package_data.py index e096d1d52..8501893bd 100644 --- a/packages/idna/package_data.py +++ b/packages/idna/package_data.py @@ -1,2 +1,2 @@ -__version__ = '3.2' +__version__ = '3.4' diff --git a/packages/idna/uts46data.py b/packages/idna/uts46data.py index f382ce389..186796c17 100644 --- a/packages/idna/uts46data.py +++ b/packages/idna/uts46data.py @@ -1,13 +1,14 @@ # This file is automatically generated by tools/idna-data +# vim: set fileencoding=utf-8 : from typing import List, Tuple, Union + """IDNA Mapping Table from UTS46.""" -__version__ = '13.0.0' -def _seg_0(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] +__version__ = '15.0.0' +def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x0, '3'), (0x1, '3'), @@ -111,8 +112,7 @@ def _seg_0(): (0x63, 'V'), ] -def _seg_1(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] +def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x64, 'V'), (0x65, 'V'), @@ -216,8 +216,7 @@ def _seg_1(): (0xC7, 'M', 'ç'), ] -def _seg_2(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] +def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0xC8, 'M', 'è'), (0xC9, 'M', 'é'), @@ -321,8 +320,7 @@ def _seg_2(): (0x12B, 'V'), ] -def _seg_3(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] +def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x12C, 'M', 'ĭ'), (0x12D, 'V'), @@ -426,8 +424,7 @@ def _seg_3(): (0x193, 'M', 'ɠ'), ] -def _seg_4(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] +def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x194, 'M', 'ɣ'), (0x195, 'V'), @@ -531,8 +528,7 @@ def _seg_4(): (0x20C, 'M', 'ȍ'), ] -def _seg_5(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] +def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x20D, 'V'), (0x20E, 'M', 'ȏ'), @@ -636,8 +632,7 @@ def _seg_5(): (0x377, 'V'), ] -def _seg_6(): - # 
type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] +def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x378, 'X'), (0x37A, '3', ' ι'), @@ -741,8 +736,7 @@ def _seg_6(): (0x402, 'M', 'ђ'), ] -def _seg_7(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] +def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x403, 'M', 'ѓ'), (0x404, 'M', 'є'), @@ -846,8 +840,7 @@ def _seg_7(): (0x49D, 'V'), ] -def _seg_8(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] +def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x49E, 'M', 'ҟ'), (0x49F, 'V'), @@ -951,8 +944,7 @@ def _seg_8(): (0x502, 'M', 'ԃ'), ] -def _seg_9(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] +def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x503, 'V'), (0x504, 'M', 'ԅ'), @@ -1053,11 +1045,10 @@ def _seg_9(): (0x5F5, 'X'), (0x606, 'V'), (0x61C, 'X'), - (0x61E, 'V'), + (0x61D, 'V'), ] -def _seg_10(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] +def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x675, 'M', 'اٴ'), (0x676, 'M', 'وٴ'), @@ -1083,11 +1074,9 @@ def _seg_10(): (0x85F, 'X'), (0x860, 'V'), (0x86B, 'X'), - (0x8A0, 'V'), - (0x8B5, 'X'), - (0x8B6, 'V'), - (0x8C8, 'X'), - (0x8D3, 'V'), + (0x870, 'V'), + (0x88F, 'X'), + (0x898, 'V'), (0x8E2, 'X'), (0x8E3, 'V'), (0x958, 'M', 'क़'), @@ -1159,13 +1148,12 @@ def _seg_10(): (0xA59, 'M', 'ਖ਼'), (0xA5A, 'M', 'ਗ਼'), (0xA5B, 'M', 'ਜ਼'), + (0xA5C, 'V'), + (0xA5D, 'X'), ] -def _seg_11(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] +def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xA5C, 'V'), - (0xA5D, 'X'), (0xA5E, 'M', 'ਫ਼'), (0xA5F, 'X'), (0xA66, 'V'), @@ -1264,15 +1252,14 @@ def _seg_11(): (0xC0E, 'V'), (0xC11, 'X'), (0xC12, 'V'), + (0xC29, 'X'), + (0xC2A, 'V'), ] -def _seg_12(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] +def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xC29, 'X'), - (0xC2A, 'V'), (0xC3A, 'X'), - (0xC3D, 'V'), + (0xC3C, 'V'), (0xC45, 'X'), (0xC46, 'V'), (0xC49, 'X'), @@ -1282,6 +1269,8 @@ def _seg_12(): (0xC57, 'X'), (0xC58, 'V'), (0xC5B, 'X'), + (0xC5D, 'V'), + (0xC5E, 'X'), (0xC60, 'V'), (0xC64, 'X'), (0xC66, 'V'), @@ -1304,14 +1293,14 @@ def _seg_12(): (0xCCE, 'X'), (0xCD5, 'V'), (0xCD7, 'X'), - (0xCDE, 'V'), + (0xCDD, 'V'), (0xCDF, 'X'), (0xCE0, 'V'), (0xCE4, 'X'), (0xCE6, 'V'), (0xCF0, 'X'), (0xCF1, 'V'), - (0xCF3, 'X'), + (0xCF4, 'X'), (0xD00, 'V'), (0xD0D, 'X'), (0xD0E, 'V'), @@ -1371,8 +1360,7 @@ def _seg_12(): (0xEB4, 'V'), ] -def _seg_13(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] +def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0xEBE, 'X'), (0xEC0, 'V'), @@ -1380,7 +1368,7 @@ def _seg_13(): (0xEC6, 'V'), (0xEC7, 'X'), (0xEC8, 'V'), - (0xECE, 'X'), + (0xECF, 'X'), (0xED0, 'V'), (0xEDA, 'X'), (0xEDC, 'M', 'ຫນ'), @@ -1476,8 +1464,7 @@ def _seg_13(): (0x1312, 'V'), ] -def _seg_14(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] +def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x1316, 'X'), (0x1318, 'V'), @@ -1502,10 +1489,8 @@ def _seg_14(): (0x16A0, 'V'), (0x16F9, 'X'), (0x1700, 'V'), - (0x170D, 'X'), - (0x170E, 'V'), - (0x1715, 'X'), - (0x1720, 'V'), + (0x1716, 'X'), + (0x171F, 'V'), (0x1737, 'X'), (0x1740, 'V'), 
(0x1754, 'X'), @@ -1528,6 +1513,7 @@ def _seg_14(): (0x1807, 'V'), (0x180B, 'I'), (0x180E, 'X'), + (0x180F, 'I'), (0x1810, 'V'), (0x181A, 'X'), (0x1820, 'V'), @@ -1567,11 +1553,11 @@ def _seg_14(): (0x1AA0, 'V'), (0x1AAE, 'X'), (0x1AB0, 'V'), - (0x1AC1, 'X'), + (0x1ACF, 'X'), (0x1B00, 'V'), - (0x1B4C, 'X'), + (0x1B4D, 'X'), (0x1B50, 'V'), - (0x1B7D, 'X'), + (0x1B7F, 'X'), (0x1B80, 'V'), (0x1BF4, 'X'), (0x1BFC, 'V'), @@ -1579,12 +1565,11 @@ def _seg_14(): (0x1C3B, 'V'), (0x1C4A, 'X'), (0x1C4D, 'V'), + (0x1C80, 'M', 'в'), ] -def _seg_15(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] +def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x1C80, 'M', 'в'), (0x1C81, 'M', 'д'), (0x1C82, 'M', 'о'), (0x1C83, 'M', 'с'), @@ -1684,12 +1669,11 @@ def _seg_15(): (0x1D50, 'M', 'm'), (0x1D51, 'M', 'ŋ'), (0x1D52, 'M', 'o'), + (0x1D53, 'M', 'ɔ'), ] -def _seg_16(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] +def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x1D53, 'M', 'ɔ'), (0x1D54, 'M', 'ᴖ'), (0x1D55, 'M', 'ᴗ'), (0x1D56, 'M', 'p'), @@ -1754,8 +1738,6 @@ def _seg_16(): (0x1DBE, 'M', 'ʒ'), (0x1DBF, 'M', 'θ'), (0x1DC0, 'V'), - (0x1DFA, 'X'), - (0x1DFB, 'V'), (0x1E00, 'M', 'ḁ'), (0x1E01, 'V'), (0x1E02, 'M', 'ḃ'), @@ -1789,14 +1771,13 @@ def _seg_16(): (0x1E1E, 'M', 'ḟ'), (0x1E1F, 'V'), (0x1E20, 'M', 'ḡ'), - ] - -def _seg_17(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1E21, 'V'), (0x1E22, 'M', 'ḣ'), (0x1E23, 'V'), + ] + +def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1E24, 'M', 'ḥ'), (0x1E25, 'V'), (0x1E26, 'M', 'ḧ'), @@ -1894,14 +1875,13 @@ def _seg_17(): (0x1E82, 'M', 'ẃ'), (0x1E83, 'V'), (0x1E84, 'M', 'ẅ'), - ] - -def _seg_18(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1E85, 'V'), (0x1E86, 'M', 'ẇ'), (0x1E87, 'V'), + ] + +def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1E88, 'M', 'ẉ'), (0x1E89, 'V'), (0x1E8A, 'M', 'ẋ'), @@ -1999,14 +1979,13 @@ def _seg_18(): (0x1EEB, 'V'), (0x1EEC, 'M', 'ử'), (0x1EED, 'V'), - ] - -def _seg_19(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1EEE, 'M', 'ữ'), (0x1EEF, 'V'), (0x1EF0, 'M', 'ự'), + ] + +def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1EF1, 'V'), (0x1EF2, 'M', 'ỳ'), (0x1EF3, 'V'), @@ -2104,14 +2083,13 @@ def _seg_19(): (0x1F82, 'M', 'ἂι'), (0x1F83, 'M', 'ἃι'), (0x1F84, 'M', 'ἄι'), - ] - -def _seg_20(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1F85, 'M', 'ἅι'), (0x1F86, 'M', 'ἆι'), (0x1F87, 'M', 'ἇι'), + ] + +def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1F88, 'M', 'ἀι'), (0x1F89, 'M', 'ἁι'), (0x1F8A, 'M', 'ἂι'), @@ -2209,14 +2187,13 @@ def _seg_20(): (0x1FF0, 'X'), (0x1FF2, 'M', 'ὼι'), (0x1FF3, 'M', 'ωι'), - ] - -def _seg_21(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1FF4, 'M', 'ώι'), (0x1FF5, 'X'), (0x1FF6, 'V'), + ] + +def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1FF7, 'M', 'ῶι'), (0x1FF8, 'M', 'ὸ'), (0x1FF9, 'M', 'ό'), @@ -2309,19 +2286,18 @@ def _seg_21(): (0x20A0, 'V'), (0x20A8, 'M', 'rs'), (0x20A9, 'V'), - (0x20C0, 'X'), + (0x20C1, 'X'), (0x20D0, 'V'), (0x20F1, 'X'), (0x2100, '3', 'a/c'), (0x2101, '3', 'a/s'), - ] - -def _seg_22(): - # type: () -> List[Union[Tuple[int, str], 
Tuple[int, str, str]]] - return [ (0x2102, 'M', 'c'), (0x2103, 'M', '°c'), (0x2104, 'V'), + ] + +def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2105, '3', 'c/o'), (0x2106, '3', 'c/u'), (0x2107, 'M', 'ɛ'), @@ -2419,14 +2395,13 @@ def _seg_22(): (0x2177, 'M', 'viii'), (0x2178, 'M', 'ix'), (0x2179, 'M', 'x'), - ] - -def _seg_23(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x217A, 'M', 'xi'), (0x217B, 'M', 'xii'), (0x217C, 'M', 'l'), + ] + +def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x217D, 'M', 'c'), (0x217E, 'M', 'd'), (0x217F, 'M', 'm'), @@ -2524,14 +2499,13 @@ def _seg_23(): (0x24B7, 'M', 'b'), (0x24B8, 'M', 'c'), (0x24B9, 'M', 'd'), - ] - -def _seg_24(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x24BA, 'M', 'e'), (0x24BB, 'M', 'f'), (0x24BC, 'M', 'g'), + ] + +def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x24BD, 'M', 'h'), (0x24BE, 'M', 'i'), (0x24BF, 'M', 'j'), @@ -2629,23 +2603,21 @@ def _seg_24(): (0x2C23, 'M', 'ⱓ'), (0x2C24, 'M', 'ⱔ'), (0x2C25, 'M', 'ⱕ'), - ] - -def _seg_25(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x2C26, 'M', 'ⱖ'), (0x2C27, 'M', 'ⱗ'), (0x2C28, 'M', 'ⱘ'), + ] + +def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2C29, 'M', 'ⱙ'), (0x2C2A, 'M', 'ⱚ'), (0x2C2B, 'M', 'ⱛ'), (0x2C2C, 'M', 'ⱜ'), (0x2C2D, 'M', 'ⱝ'), (0x2C2E, 'M', 'ⱞ'), - (0x2C2F, 'X'), + (0x2C2F, 'M', 'ⱟ'), (0x2C30, 'V'), - (0x2C5F, 'X'), (0x2C60, 'M', 'ⱡ'), (0x2C61, 'V'), (0x2C62, 'M', 'ɫ'), @@ -2734,15 +2706,14 @@ def _seg_25(): (0x2CBC, 'M', 'ⲽ'), (0x2CBD, 'V'), (0x2CBE, 'M', 'ⲿ'), - ] - -def _seg_26(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x2CBF, 'V'), (0x2CC0, 'M', 'ⳁ'), (0x2CC1, 'V'), (0x2CC2, 'M', 'ⳃ'), + ] + +def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2CC3, 'V'), (0x2CC4, 'M', 'ⳅ'), (0x2CC5, 'V'), @@ -2813,7 +2784,7 @@ def _seg_26(): (0x2DD8, 'V'), (0x2DDF, 'X'), (0x2DE0, 'V'), - (0x2E53, 'X'), + (0x2E5E, 'X'), (0x2E80, 'V'), (0x2E9A, 'X'), (0x2E9B, 'V'), @@ -2839,15 +2810,14 @@ def _seg_26(): (0x2F0F, 'M', '几'), (0x2F10, 'M', '凵'), (0x2F11, 'M', '刀'), - ] - -def _seg_27(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x2F12, 'M', '力'), (0x2F13, 'M', '勹'), (0x2F14, 'M', '匕'), (0x2F15, 'M', '匚'), + ] + +def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2F16, 'M', '匸'), (0x2F17, 'M', '十'), (0x2F18, 'M', '卜'), @@ -2944,15 +2914,14 @@ def _seg_27(): (0x2F73, 'M', '穴'), (0x2F74, 'M', '立'), (0x2F75, 'M', '竹'), - ] - -def _seg_28(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x2F76, 'M', '米'), (0x2F77, 'M', '糸'), (0x2F78, 'M', '缶'), (0x2F79, 'M', '网'), + ] + +def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2F7A, 'M', '羊'), (0x2F7B, 'M', '羽'), (0x2F7C, 'M', '老'), @@ -3049,15 +3018,14 @@ def _seg_28(): (0x3000, '3', ' '), (0x3001, 'V'), (0x3002, 'M', '.'), - ] - -def _seg_29(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x3003, 'V'), (0x3036, 'M', '〒'), (0x3037, 'V'), (0x3038, 'M', '十'), + ] + +def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x3039, 'M', '卄'), (0x303A, 'M', '卅'), (0x303B, 'V'), @@ -3154,15 +3122,14 @@ def _seg_29(): (0x317E, 'M', 'ᄶ'), (0x317F, 'M', 'ᅀ'), 
(0x3180, 'M', 'ᅇ'), - ] - -def _seg_30(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x3181, 'M', 'ᅌ'), (0x3182, 'M', 'ᇱ'), (0x3183, 'M', 'ᇲ'), (0x3184, 'M', 'ᅗ'), + ] + +def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x3185, 'M', 'ᅘ'), (0x3186, 'M', 'ᅙ'), (0x3187, 'M', 'ᆄ'), @@ -3259,15 +3226,14 @@ def _seg_30(): (0x3240, '3', '(祭)'), (0x3241, '3', '(休)'), (0x3242, '3', '(自)'), - ] - -def _seg_31(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x3243, '3', '(至)'), (0x3244, 'M', '問'), (0x3245, 'M', '幼'), (0x3246, 'M', '文'), + ] + +def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x3247, 'M', '箏'), (0x3248, 'V'), (0x3250, 'M', 'pte'), @@ -3364,15 +3330,14 @@ def _seg_31(): (0x32AB, 'M', '学'), (0x32AC, 'M', '監'), (0x32AD, 'M', '企'), - ] - -def _seg_32(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x32AE, 'M', '資'), (0x32AF, 'M', '協'), (0x32B0, 'M', '夜'), (0x32B1, 'M', '36'), + ] + +def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x32B2, 'M', '37'), (0x32B3, 'M', '38'), (0x32B4, 'M', '39'), @@ -3469,15 +3434,14 @@ def _seg_32(): (0x330F, 'M', 'ガンマ'), (0x3310, 'M', 'ギガ'), (0x3311, 'M', 'ギニー'), - ] - -def _seg_33(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x3312, 'M', 'キュリー'), (0x3313, 'M', 'ギルダー'), (0x3314, 'M', 'キロ'), (0x3315, 'M', 'キログラム'), + ] + +def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x3316, 'M', 'キロメートル'), (0x3317, 'M', 'キロワット'), (0x3318, 'M', 'グラム'), @@ -3574,15 +3538,14 @@ def _seg_33(): (0x3373, 'M', 'au'), (0x3374, 'M', 'bar'), (0x3375, 'M', 'ov'), - ] - -def _seg_34(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x3376, 'M', 'pc'), (0x3377, 'M', 'dm'), (0x3378, 'M', 'dm2'), (0x3379, 'M', 'dm3'), + ] + +def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x337A, 'M', 'iu'), (0x337B, 'M', '平成'), (0x337C, 'M', '昭和'), @@ -3679,15 +3642,14 @@ def _seg_34(): (0x33D7, 'M', 'ph'), (0x33D8, 'X'), (0x33D9, 'M', 'ppm'), - ] - -def _seg_35(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x33DA, 'M', 'pr'), (0x33DB, 'M', 'sr'), (0x33DC, 'M', 'sv'), (0x33DD, 'M', 'wb'), + ] + +def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x33DE, 'M', 'v∕m'), (0x33DF, 'M', 'a∕m'), (0x33E0, 'M', '1日'), @@ -3723,8 +3685,6 @@ def _seg_35(): (0x33FE, 'M', '31日'), (0x33FF, 'M', 'gal'), (0x3400, 'V'), - (0x9FFD, 'X'), - (0xA000, 'V'), (0xA48D, 'X'), (0xA490, 'V'), (0xA4C7, 'X'), @@ -3784,17 +3744,16 @@ def _seg_35(): (0xA685, 'V'), (0xA686, 'M', 'ꚇ'), (0xA687, 'V'), - ] - -def _seg_36(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0xA688, 'M', 'ꚉ'), (0xA689, 'V'), (0xA68A, 'M', 'ꚋ'), (0xA68B, 'V'), (0xA68C, 'M', 'ꚍ'), (0xA68D, 'V'), + ] + +def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xA68E, 'M', 'ꚏ'), (0xA68F, 'V'), (0xA690, 'M', 'ꚑ'), @@ -3889,17 +3848,16 @@ def _seg_36(): (0xA76C, 'M', 'ꝭ'), (0xA76D, 'V'), (0xA76E, 'M', 'ꝯ'), - ] - -def _seg_37(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0xA76F, 'V'), (0xA770, 'M', 'ꝯ'), (0xA771, 'V'), (0xA779, 'M', 'ꝺ'), (0xA77A, 'V'), (0xA77B, 'M', 'ꝼ'), + ] + +def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xA77C, 'V'), (0xA77D, 
'M', 'ᵹ'), (0xA77E, 'M', 'ꝿ'), @@ -3962,7 +3920,8 @@ def _seg_37(): (0xA7BD, 'V'), (0xA7BE, 'M', 'ꞿ'), (0xA7BF, 'V'), - (0xA7C0, 'X'), + (0xA7C0, 'M', 'ꟁ'), + (0xA7C1, 'V'), (0xA7C2, 'M', 'ꟃ'), (0xA7C3, 'V'), (0xA7C4, 'M', 'ꞔ'), @@ -3973,6 +3932,20 @@ def _seg_37(): (0xA7C9, 'M', 'ꟊ'), (0xA7CA, 'V'), (0xA7CB, 'X'), + (0xA7D0, 'M', 'ꟑ'), + (0xA7D1, 'V'), + (0xA7D2, 'X'), + (0xA7D3, 'V'), + (0xA7D4, 'X'), + (0xA7D5, 'V'), + (0xA7D6, 'M', 'ꟗ'), + (0xA7D7, 'V'), + (0xA7D8, 'M', 'ꟙ'), + (0xA7D9, 'V'), + (0xA7DA, 'X'), + (0xA7F2, 'M', 'c'), + (0xA7F3, 'M', 'f'), + (0xA7F4, 'M', 'q'), (0xA7F5, 'M', 'ꟶ'), (0xA7F6, 'V'), (0xA7F8, 'M', 'ħ'), @@ -3985,6 +3958,10 @@ def _seg_37(): (0xA878, 'X'), (0xA880, 'V'), (0xA8C6, 'X'), + ] + +def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xA8CE, 'V'), (0xA8DA, 'X'), (0xA8E0, 'V'), @@ -3994,11 +3971,6 @@ def _seg_37(): (0xA980, 'V'), (0xA9CE, 'X'), (0xA9CF, 'V'), - ] - -def _seg_38(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0xA9DA, 'X'), (0xA9DE, 'V'), (0xA9FF, 'X'), @@ -4090,6 +4062,10 @@ def _seg_38(): (0xABA8, 'M', 'Ꮨ'), (0xABA9, 'M', 'Ꮩ'), (0xABAA, 'M', 'Ꮪ'), + ] + +def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xABAB, 'M', 'Ꮫ'), (0xABAC, 'M', 'Ꮬ'), (0xABAD, 'M', 'Ꮭ'), @@ -4099,11 +4075,6 @@ def _seg_38(): (0xABB1, 'M', 'Ꮱ'), (0xABB2, 'M', 'Ꮲ'), (0xABB3, 'M', 'Ꮳ'), - ] - -def _seg_39(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0xABB4, 'M', 'Ꮴ'), (0xABB5, 'M', 'Ꮵ'), (0xABB6, 'M', 'Ꮶ'), @@ -4195,6 +4166,10 @@ def _seg_39(): (0xF943, 'M', '弄'), (0xF944, 'M', '籠'), (0xF945, 'M', '聾'), + ] + +def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xF946, 'M', '牢'), (0xF947, 'M', '磊'), (0xF948, 'M', '賂'), @@ -4204,11 +4179,6 @@ def _seg_39(): (0xF94C, 'M', '樓'), (0xF94D, 'M', '淚'), (0xF94E, 'M', '漏'), - ] - -def _seg_40(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0xF94F, 'M', '累'), (0xF950, 'M', '縷'), (0xF951, 'M', '陋'), @@ -4300,6 +4270,10 @@ def _seg_40(): (0xF9A7, 'M', '獵'), (0xF9A8, 'M', '令'), (0xF9A9, 'M', '囹'), + ] + +def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xF9AA, 'M', '寧'), (0xF9AB, 'M', '嶺'), (0xF9AC, 'M', '怜'), @@ -4309,11 +4283,6 @@ def _seg_40(): (0xF9B0, 'M', '聆'), (0xF9B1, 'M', '鈴'), (0xF9B2, 'M', '零'), - ] - -def _seg_41(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0xF9B3, 'M', '靈'), (0xF9B4, 'M', '領'), (0xF9B5, 'M', '例'), @@ -4405,6 +4374,10 @@ def _seg_41(): (0xFA0B, 'M', '廓'), (0xFA0C, 'M', '兀'), (0xFA0D, 'M', '嗀'), + ] + +def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFA0E, 'V'), (0xFA10, 'M', '塚'), (0xFA11, 'V'), @@ -4414,11 +4387,6 @@ def _seg_41(): (0xFA16, 'M', '猪'), (0xFA17, 'M', '益'), (0xFA18, 'M', '礼'), - ] - -def _seg_42(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0xFA19, 'M', '神'), (0xFA1A, 'M', '祥'), (0xFA1B, 'M', '福'), @@ -4510,6 +4478,10 @@ def _seg_42(): (0xFA76, 'M', '勇'), (0xFA77, 'M', '勺'), (0xFA78, 'M', '喝'), + ] + +def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFA79, 'M', '啕'), (0xFA7A, 'M', '喙'), (0xFA7B, 'M', '嗢'), @@ -4519,11 +4491,6 @@ def _seg_42(): (0xFA7F, 'M', '奔'), (0xFA80, 'M', '婢'), (0xFA81, 'M', '嬨'), - ] - -def _seg_43(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0xFA82, 'M', '廒'), 
(0xFA83, 'M', '廙'), (0xFA84, 'M', '彩'), @@ -4615,6 +4582,10 @@ def _seg_43(): (0xFADA, 'X'), (0xFB00, 'M', 'ff'), (0xFB01, 'M', 'fi'), + ] + +def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFB02, 'M', 'fl'), (0xFB03, 'M', 'ffi'), (0xFB04, 'M', 'ffl'), @@ -4624,11 +4595,6 @@ def _seg_43(): (0xFB14, 'M', 'մե'), (0xFB15, 'M', 'մի'), (0xFB16, 'M', 'վն'), - ] - -def _seg_44(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0xFB17, 'M', 'մխ'), (0xFB18, 'X'), (0xFB1D, 'M', 'יִ'), @@ -4713,13 +4679,17 @@ def _seg_44(): (0xFBAE, 'M', 'ے'), (0xFBB0, 'M', 'ۓ'), (0xFBB2, 'V'), - (0xFBC2, 'X'), + (0xFBC3, 'X'), (0xFBD3, 'M', 'ڭ'), (0xFBD7, 'M', 'ۇ'), (0xFBD9, 'M', 'ۆ'), (0xFBDB, 'M', 'ۈ'), (0xFBDD, 'M', 'ۇٴ'), (0xFBDE, 'M', 'ۋ'), + ] + +def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFBE0, 'M', 'ۅ'), (0xFBE2, 'M', 'ۉ'), (0xFBE4, 'M', 'ې'), @@ -4729,11 +4699,6 @@ def _seg_44(): (0xFBEE, 'M', 'ئو'), (0xFBF0, 'M', 'ئۇ'), (0xFBF2, 'M', 'ئۆ'), - ] - -def _seg_45(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0xFBF4, 'M', 'ئۈ'), (0xFBF6, 'M', 'ئې'), (0xFBF9, 'M', 'ئى'), @@ -4825,6 +4790,10 @@ def _seg_45(): (0xFC54, 'M', 'هي'), (0xFC55, 'M', 'يج'), (0xFC56, 'M', 'يح'), + ] + +def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFC57, 'M', 'يخ'), (0xFC58, 'M', 'يم'), (0xFC59, 'M', 'يى'), @@ -4834,11 +4803,6 @@ def _seg_45(): (0xFC5D, 'M', 'ىٰ'), (0xFC5E, '3', ' ٌّ'), (0xFC5F, '3', ' ٍّ'), - ] - -def _seg_46(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0xFC60, '3', ' َّ'), (0xFC61, '3', ' ُّ'), (0xFC62, '3', ' ِّ'), @@ -4930,6 +4894,10 @@ def _seg_46(): (0xFCB8, 'M', 'طح'), (0xFCB9, 'M', 'ظم'), (0xFCBA, 'M', 'عج'), + ] + +def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFCBB, 'M', 'عم'), (0xFCBC, 'M', 'غج'), (0xFCBD, 'M', 'غم'), @@ -4939,11 +4907,6 @@ def _seg_46(): (0xFCC1, 'M', 'فم'), (0xFCC2, 'M', 'قح'), (0xFCC3, 'M', 'قم'), - ] - -def _seg_47(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0xFCC4, 'M', 'كج'), (0xFCC5, 'M', 'كح'), (0xFCC6, 'M', 'كخ'), @@ -5035,6 +4998,10 @@ def _seg_47(): (0xFD1C, 'M', 'حي'), (0xFD1D, 'M', 'جى'), (0xFD1E, 'M', 'جي'), + ] + +def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFD1F, 'M', 'خى'), (0xFD20, 'M', 'خي'), (0xFD21, 'M', 'صى'), @@ -5044,11 +5011,6 @@ def _seg_47(): (0xFD25, 'M', 'شج'), (0xFD26, 'M', 'شح'), (0xFD27, 'M', 'شخ'), - ] - -def _seg_48(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0xFD28, 'M', 'شم'), (0xFD29, 'M', 'شر'), (0xFD2A, 'M', 'سر'), @@ -5071,7 +5033,6 @@ def _seg_48(): (0xFD3B, 'M', 'ظم'), (0xFD3C, 'M', 'اً'), (0xFD3E, 'V'), - (0xFD40, 'X'), (0xFD50, 'M', 'تجم'), (0xFD51, 'M', 'تحج'), (0xFD53, 'M', 'تحم'), @@ -5141,6 +5102,10 @@ def _seg_48(): (0xFDA4, 'M', 'تمى'), (0xFDA5, 'M', 'جمي'), (0xFDA6, 'M', 'جحى'), + ] + +def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFDA7, 'M', 'جمى'), (0xFDA8, 'M', 'سخى'), (0xFDA9, 'M', 'صحي'), @@ -5149,11 +5114,6 @@ def _seg_48(): (0xFDAC, 'M', 'لجي'), (0xFDAD, 'M', 'لمي'), (0xFDAE, 'M', 'يحي'), - ] - -def _seg_49(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0xFDAF, 'M', 'يجي'), (0xFDB0, 'M', 'يمي'), (0xFDB1, 'M', 'ممي'), @@ -5180,6 +5140,8 @@ def _seg_49(): (0xFDC6, 'M', 'سخي'), (0xFDC7, 'M', 'نجي'), (0xFDC8, 'X'), 
+ (0xFDCF, 'V'), + (0xFDD0, 'X'), (0xFDF0, 'M', 'صلے'), (0xFDF1, 'M', 'قلے'), (0xFDF2, 'M', 'الله'), @@ -5194,7 +5156,6 @@ def _seg_49(): (0xFDFB, '3', 'جل جلاله'), (0xFDFC, 'M', 'ریال'), (0xFDFD, 'V'), - (0xFDFE, 'X'), (0xFE00, 'I'), (0xFE10, '3', ','), (0xFE11, 'M', '、'), @@ -5245,6 +5206,10 @@ def _seg_49(): (0xFE5B, '3', '{'), (0xFE5C, '3', '}'), (0xFE5D, 'M', '〔'), + ] + +def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFE5E, 'M', '〕'), (0xFE5F, '3', '#'), (0xFE60, '3', '&'), @@ -5254,11 +5219,6 @@ def _seg_49(): (0xFE64, '3', '<'), (0xFE65, '3', '>'), (0xFE66, '3', '='), - ] - -def _seg_50(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0xFE67, 'X'), (0xFE68, '3', '\\'), (0xFE69, '3', '$'), @@ -5350,6 +5310,10 @@ def _seg_50(): (0xFF18, 'M', '8'), (0xFF19, 'M', '9'), (0xFF1A, '3', ':'), + ] + +def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFF1B, '3', ';'), (0xFF1C, '3', '<'), (0xFF1D, '3', '='), @@ -5359,11 +5323,6 @@ def _seg_50(): (0xFF21, 'M', 'a'), (0xFF22, 'M', 'b'), (0xFF23, 'M', 'c'), - ] - -def _seg_51(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0xFF24, 'M', 'd'), (0xFF25, 'M', 'e'), (0xFF26, 'M', 'f'), @@ -5455,6 +5414,10 @@ def _seg_51(): (0xFF7C, 'M', 'シ'), (0xFF7D, 'M', 'ス'), (0xFF7E, 'M', 'セ'), + ] + +def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFF7F, 'M', 'ソ'), (0xFF80, 'M', 'タ'), (0xFF81, 'M', 'チ'), @@ -5464,11 +5427,6 @@ def _seg_51(): (0xFF85, 'M', 'ナ'), (0xFF86, 'M', 'ニ'), (0xFF87, 'M', 'ヌ'), - ] - -def _seg_52(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0xFF88, 'M', 'ネ'), (0xFF89, 'M', 'ノ'), (0xFF8A, 'M', 'ハ'), @@ -5560,6 +5518,10 @@ def _seg_52(): (0xFFE7, 'X'), (0xFFE8, 'M', '│'), (0xFFE9, 'M', '←'), + ] + +def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFFEA, 'M', '↑'), (0xFFEB, 'M', '→'), (0xFFEC, 'M', '↓'), @@ -5569,11 +5531,6 @@ def _seg_52(): (0x10000, 'V'), (0x1000C, 'X'), (0x1000D, 'V'), - ] - -def _seg_53(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x10027, 'X'), (0x10028, 'V'), (0x1003B, 'X'), @@ -5665,6 +5622,10 @@ def _seg_53(): (0x104B3, 'M', '𐓛'), (0x104B4, 'M', '𐓜'), (0x104B5, 'M', '𐓝'), + ] + +def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x104B6, 'M', '𐓞'), (0x104B7, 'M', '𐓟'), (0x104B8, 'M', '𐓠'), @@ -5674,11 +5635,6 @@ def _seg_53(): (0x104BC, 'M', '𐓤'), (0x104BD, 'M', '𐓥'), (0x104BE, 'M', '𐓦'), - ] - -def _seg_54(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x104BF, 'M', '𐓧'), (0x104C0, 'M', '𐓨'), (0x104C1, 'M', '𐓩'), @@ -5708,13 +5664,123 @@ def _seg_54(): (0x10530, 'V'), (0x10564, 'X'), (0x1056F, 'V'), - (0x10570, 'X'), + (0x10570, 'M', '𐖗'), + (0x10571, 'M', '𐖘'), + (0x10572, 'M', '𐖙'), + (0x10573, 'M', '𐖚'), + (0x10574, 'M', '𐖛'), + (0x10575, 'M', '𐖜'), + (0x10576, 'M', '𐖝'), + (0x10577, 'M', '𐖞'), + (0x10578, 'M', '𐖟'), + (0x10579, 'M', '𐖠'), + (0x1057A, 'M', '𐖡'), + (0x1057B, 'X'), + (0x1057C, 'M', '𐖣'), + (0x1057D, 'M', '𐖤'), + (0x1057E, 'M', '𐖥'), + (0x1057F, 'M', '𐖦'), + (0x10580, 'M', '𐖧'), + (0x10581, 'M', '𐖨'), + (0x10582, 'M', '𐖩'), + (0x10583, 'M', '𐖪'), + (0x10584, 'M', '𐖫'), + (0x10585, 'M', '𐖬'), + (0x10586, 'M', '𐖭'), + (0x10587, 'M', '𐖮'), + (0x10588, 'M', '𐖯'), + (0x10589, 'M', '𐖰'), + (0x1058A, 'M', '𐖱'), + (0x1058B, 'X'), + (0x1058C, 'M', '𐖳'), + (0x1058D, 'M', '𐖴'), 
+ (0x1058E, 'M', '𐖵'), + (0x1058F, 'M', '𐖶'), + (0x10590, 'M', '𐖷'), + (0x10591, 'M', '𐖸'), + (0x10592, 'M', '𐖹'), + (0x10593, 'X'), + (0x10594, 'M', '𐖻'), + (0x10595, 'M', '𐖼'), + (0x10596, 'X'), + (0x10597, 'V'), + (0x105A2, 'X'), + (0x105A3, 'V'), + (0x105B2, 'X'), + (0x105B3, 'V'), + (0x105BA, 'X'), + (0x105BB, 'V'), + (0x105BD, 'X'), (0x10600, 'V'), (0x10737, 'X'), (0x10740, 'V'), (0x10756, 'X'), (0x10760, 'V'), (0x10768, 'X'), + (0x10780, 'V'), + (0x10781, 'M', 'ː'), + (0x10782, 'M', 'ˑ'), + (0x10783, 'M', 'æ'), + (0x10784, 'M', 'ʙ'), + (0x10785, 'M', 'ɓ'), + (0x10786, 'X'), + (0x10787, 'M', 'ʣ'), + (0x10788, 'M', 'ꭦ'), + ] + +def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10789, 'M', 'ʥ'), + (0x1078A, 'M', 'ʤ'), + (0x1078B, 'M', 'ɖ'), + (0x1078C, 'M', 'ɗ'), + (0x1078D, 'M', 'ᶑ'), + (0x1078E, 'M', 'ɘ'), + (0x1078F, 'M', 'ɞ'), + (0x10790, 'M', 'ʩ'), + (0x10791, 'M', 'ɤ'), + (0x10792, 'M', 'ɢ'), + (0x10793, 'M', 'ɠ'), + (0x10794, 'M', 'ʛ'), + (0x10795, 'M', 'ħ'), + (0x10796, 'M', 'ʜ'), + (0x10797, 'M', 'ɧ'), + (0x10798, 'M', 'ʄ'), + (0x10799, 'M', 'ʪ'), + (0x1079A, 'M', 'ʫ'), + (0x1079B, 'M', 'ɬ'), + (0x1079C, 'M', '𝼄'), + (0x1079D, 'M', 'ꞎ'), + (0x1079E, 'M', 'ɮ'), + (0x1079F, 'M', '𝼅'), + (0x107A0, 'M', 'ʎ'), + (0x107A1, 'M', '𝼆'), + (0x107A2, 'M', 'ø'), + (0x107A3, 'M', 'ɶ'), + (0x107A4, 'M', 'ɷ'), + (0x107A5, 'M', 'q'), + (0x107A6, 'M', 'ɺ'), + (0x107A7, 'M', '𝼈'), + (0x107A8, 'M', 'ɽ'), + (0x107A9, 'M', 'ɾ'), + (0x107AA, 'M', 'ʀ'), + (0x107AB, 'M', 'ʨ'), + (0x107AC, 'M', 'ʦ'), + (0x107AD, 'M', 'ꭧ'), + (0x107AE, 'M', 'ʧ'), + (0x107AF, 'M', 'ʈ'), + (0x107B0, 'M', 'ⱱ'), + (0x107B1, 'X'), + (0x107B2, 'M', 'ʏ'), + (0x107B3, 'M', 'ʡ'), + (0x107B4, 'M', 'ʢ'), + (0x107B5, 'M', 'ʘ'), + (0x107B6, 'M', 'ǀ'), + (0x107B7, 'M', 'ǁ'), + (0x107B8, 'M', 'ǂ'), + (0x107B9, 'M', '𝼊'), + (0x107BA, 'M', '𝼞'), + (0x107BB, 'X'), (0x10800, 'V'), (0x10806, 'X'), (0x10808, 'V'), @@ -5764,6 +5830,10 @@ def _seg_54(): (0x10A60, 'V'), (0x10AA0, 'X'), (0x10AC0, 'V'), + ] + +def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x10AE7, 'X'), (0x10AEB, 'V'), (0x10AF7, 'X'), @@ -5779,11 +5849,6 @@ def _seg_54(): (0x10B9D, 'X'), (0x10BA9, 'V'), (0x10BB0, 'X'), - ] - -def _seg_55(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x10C00, 'V'), (0x10C49, 'X'), (0x10C80, 'M', '𐳀'), @@ -5852,10 +5917,12 @@ def _seg_55(): (0x10EAE, 'X'), (0x10EB0, 'V'), (0x10EB2, 'X'), - (0x10F00, 'V'), + (0x10EFD, 'V'), (0x10F28, 'X'), (0x10F30, 'V'), (0x10F5A, 'X'), + (0x10F70, 'V'), + (0x10F8A, 'X'), (0x10FB0, 'V'), (0x10FCC, 'X'), (0x10FE0, 'V'), @@ -5863,11 +5930,15 @@ def _seg_55(): (0x11000, 'V'), (0x1104E, 'X'), (0x11052, 'V'), - (0x11070, 'X'), + (0x11076, 'X'), (0x1107F, 'V'), (0x110BD, 'X'), (0x110BE, 'V'), - (0x110C2, 'X'), + ] + +def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x110C3, 'X'), (0x110D0, 'V'), (0x110E9, 'X'), (0x110F0, 'V'), @@ -5884,13 +5955,8 @@ def _seg_55(): (0x111F5, 'X'), (0x11200, 'V'), (0x11212, 'X'), - ] - -def _seg_56(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x11213, 'V'), - (0x1123F, 'X'), + (0x11242, 'X'), (0x11280, 'V'), (0x11287, 'X'), (0x11288, 'V'), @@ -5954,7 +6020,7 @@ def _seg_56(): (0x11660, 'V'), (0x1166D, 'X'), (0x11680, 'V'), - (0x116B9, 'X'), + (0x116BA, 'X'), (0x116C0, 'V'), (0x116CA, 'X'), (0x11700, 'V'), @@ -5962,7 +6028,7 @@ def _seg_56(): (0x1171D, 'V'), (0x1172C, 'X'), (0x11730, 'V'), - (0x11740, 'X'), + 
(0x11747, 'X'), (0x11800, 'V'), (0x1183C, 'X'), (0x118A0, 'M', '𑣀'), @@ -5972,6 +6038,10 @@ def _seg_56(): (0x118A4, 'M', '𑣄'), (0x118A5, 'M', '𑣅'), (0x118A6, 'M', '𑣆'), + ] + +def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x118A7, 'M', '𑣇'), (0x118A8, 'M', '𑣈'), (0x118A9, 'M', '𑣉'), @@ -5989,11 +6059,6 @@ def _seg_56(): (0x118B5, 'M', '𑣕'), (0x118B6, 'M', '𑣖'), (0x118B7, 'M', '𑣗'), - ] - -def _seg_57(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x118B8, 'M', '𑣘'), (0x118B9, 'M', '𑣙'), (0x118BA, 'M', '𑣚'), @@ -6030,8 +6095,10 @@ def _seg_57(): (0x11A48, 'X'), (0x11A50, 'V'), (0x11AA3, 'X'), - (0x11AC0, 'V'), + (0x11AB0, 'V'), (0x11AF9, 'X'), + (0x11B00, 'V'), + (0x11B0A, 'X'), (0x11C00, 'V'), (0x11C09, 'X'), (0x11C0A, 'V'), @@ -6074,6 +6141,16 @@ def _seg_57(): (0x11DAA, 'X'), (0x11EE0, 'V'), (0x11EF9, 'X'), + (0x11F00, 'V'), + ] + +def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x11F11, 'X'), + (0x11F12, 'V'), + (0x11F3B, 'X'), + (0x11F3E, 'V'), + (0x11F5A, 'X'), (0x11FB0, 'V'), (0x11FB1, 'X'), (0x11FC0, 'V'), @@ -6086,23 +6163,24 @@ def _seg_57(): (0x12475, 'X'), (0x12480, 'V'), (0x12544, 'X'), + (0x12F90, 'V'), + (0x12FF3, 'X'), (0x13000, 'V'), - (0x1342F, 'X'), + (0x13430, 'X'), + (0x13440, 'V'), + (0x13456, 'X'), (0x14400, 'V'), (0x14647, 'X'), (0x16800, 'V'), (0x16A39, 'X'), (0x16A40, 'V'), (0x16A5F, 'X'), - ] - -def _seg_58(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x16A60, 'V'), (0x16A6A, 'X'), (0x16A6E, 'V'), - (0x16A70, 'X'), + (0x16ABF, 'X'), + (0x16AC0, 'V'), + (0x16ACA, 'X'), (0x16AD0, 'V'), (0x16AEE, 'X'), (0x16AF0, 'V'), @@ -6167,10 +6245,24 @@ def _seg_58(): (0x18CD6, 'X'), (0x18D00, 'V'), (0x18D09, 'X'), + (0x1AFF0, 'V'), + ] + +def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1AFF4, 'X'), + (0x1AFF5, 'V'), + (0x1AFFC, 'X'), + (0x1AFFD, 'V'), + (0x1AFFF, 'X'), (0x1B000, 'V'), - (0x1B11F, 'X'), + (0x1B123, 'X'), + (0x1B132, 'V'), + (0x1B133, 'X'), (0x1B150, 'V'), (0x1B153, 'X'), + (0x1B155, 'V'), + (0x1B156, 'X'), (0x1B164, 'V'), (0x1B168, 'X'), (0x1B170, 'V'), @@ -6186,6 +6278,12 @@ def _seg_58(): (0x1BC9C, 'V'), (0x1BCA0, 'I'), (0x1BCA4, 'X'), + (0x1CF00, 'V'), + (0x1CF2E, 'X'), + (0x1CF30, 'V'), + (0x1CF47, 'X'), + (0x1CF50, 'V'), + (0x1CFC4, 'X'), (0x1D000, 'V'), (0x1D0F6, 'X'), (0x1D100, 'V'), @@ -6199,11 +6297,6 @@ def _seg_58(): (0x1D163, 'M', '𝅘𝅥𝅱'), (0x1D164, 'M', '𝅘𝅥𝅲'), (0x1D165, 'V'), - ] - -def _seg_59(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1D173, 'X'), (0x1D17B, 'V'), (0x1D1BB, 'M', '𝆹𝅥'), @@ -6213,9 +6306,11 @@ def _seg_59(): (0x1D1BF, 'M', '𝆹𝅥𝅯'), (0x1D1C0, 'M', '𝆺𝅥𝅯'), (0x1D1C1, 'V'), - (0x1D1E9, 'X'), + (0x1D1EB, 'X'), (0x1D200, 'V'), (0x1D246, 'X'), + (0x1D2C0, 'V'), + (0x1D2D4, 'X'), (0x1D2E0, 'V'), (0x1D2F4, 'X'), (0x1D300, 'V'), @@ -6255,6 +6350,10 @@ def _seg_59(): (0x1D41E, 'M', 'e'), (0x1D41F, 'M', 'f'), (0x1D420, 'M', 'g'), + ] + +def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D421, 'M', 'h'), (0x1D422, 'M', 'i'), (0x1D423, 'M', 'j'), @@ -6304,11 +6403,6 @@ def _seg_59(): (0x1D44F, 'M', 'b'), (0x1D450, 'M', 'c'), (0x1D451, 'M', 'd'), - ] - -def _seg_60(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1D452, 'M', 'e'), (0x1D453, 'M', 'f'), (0x1D454, 'M', 'g'), @@ -6360,6 +6454,10 @@ def _seg_60(): (0x1D482, 'M', 'a'), (0x1D483, 'M', 'b'), (0x1D484, 'M', 
'c'), + ] + +def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D485, 'M', 'd'), (0x1D486, 'M', 'e'), (0x1D487, 'M', 'f'), @@ -6409,11 +6507,6 @@ def _seg_60(): (0x1D4B6, 'M', 'a'), (0x1D4B7, 'M', 'b'), (0x1D4B8, 'M', 'c'), - ] - -def _seg_61(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1D4B9, 'M', 'd'), (0x1D4BA, 'X'), (0x1D4BB, 'M', 'f'), @@ -6465,6 +6558,10 @@ def _seg_61(): (0x1D4E9, 'M', 'z'), (0x1D4EA, 'M', 'a'), (0x1D4EB, 'M', 'b'), + ] + +def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D4EC, 'M', 'c'), (0x1D4ED, 'M', 'd'), (0x1D4EE, 'M', 'e'), @@ -6514,11 +6611,6 @@ def _seg_61(): (0x1D51B, 'M', 'x'), (0x1D51C, 'M', 'y'), (0x1D51D, 'X'), - ] - -def _seg_62(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1D51E, 'M', 'a'), (0x1D51F, 'M', 'b'), (0x1D520, 'M', 'c'), @@ -6570,6 +6662,10 @@ def _seg_62(): (0x1D550, 'M', 'y'), (0x1D551, 'X'), (0x1D552, 'M', 'a'), + ] + +def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D553, 'M', 'b'), (0x1D554, 'M', 'c'), (0x1D555, 'M', 'd'), @@ -6619,11 +6715,6 @@ def _seg_62(): (0x1D581, 'M', 'v'), (0x1D582, 'M', 'w'), (0x1D583, 'M', 'x'), - ] - -def _seg_63(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1D584, 'M', 'y'), (0x1D585, 'M', 'z'), (0x1D586, 'M', 'a'), @@ -6675,6 +6766,10 @@ def _seg_63(): (0x1D5B4, 'M', 'u'), (0x1D5B5, 'M', 'v'), (0x1D5B6, 'M', 'w'), + ] + +def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D5B7, 'M', 'x'), (0x1D5B8, 'M', 'y'), (0x1D5B9, 'M', 'z'), @@ -6724,11 +6819,6 @@ def _seg_63(): (0x1D5E5, 'M', 'r'), (0x1D5E6, 'M', 's'), (0x1D5E7, 'M', 't'), - ] - -def _seg_64(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1D5E8, 'M', 'u'), (0x1D5E9, 'M', 'v'), (0x1D5EA, 'M', 'w'), @@ -6780,6 +6870,10 @@ def _seg_64(): (0x1D618, 'M', 'q'), (0x1D619, 'M', 'r'), (0x1D61A, 'M', 's'), + ] + +def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D61B, 'M', 't'), (0x1D61C, 'M', 'u'), (0x1D61D, 'M', 'v'), @@ -6829,11 +6923,6 @@ def _seg_64(): (0x1D649, 'M', 'n'), (0x1D64A, 'M', 'o'), (0x1D64B, 'M', 'p'), - ] - -def _seg_65(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1D64C, 'M', 'q'), (0x1D64D, 'M', 'r'), (0x1D64E, 'M', 's'), @@ -6885,6 +6974,10 @@ def _seg_65(): (0x1D67C, 'M', 'm'), (0x1D67D, 'M', 'n'), (0x1D67E, 'M', 'o'), + ] + +def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D67F, 'M', 'p'), (0x1D680, 'M', 'q'), (0x1D681, 'M', 'r'), @@ -6934,11 +7027,6 @@ def _seg_65(): (0x1D6AE, 'M', 'η'), (0x1D6AF, 'M', 'θ'), (0x1D6B0, 'M', 'ι'), - ] - -def _seg_66(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1D6B1, 'M', 'κ'), (0x1D6B2, 'M', 'λ'), (0x1D6B3, 'M', 'μ'), @@ -6990,6 +7078,10 @@ def _seg_66(): (0x1D6E2, 'M', 'α'), (0x1D6E3, 'M', 'β'), (0x1D6E4, 'M', 'γ'), + ] + +def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D6E5, 'M', 'δ'), (0x1D6E6, 'M', 'ε'), (0x1D6E7, 'M', 'ζ'), @@ -7039,11 +7131,6 @@ def _seg_66(): (0x1D714, 'M', 'ω'), (0x1D715, 'M', '∂'), (0x1D716, 'M', 'ε'), - ] - -def _seg_67(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1D717, 'M', 'θ'), (0x1D718, 'M', 'κ'), (0x1D719, 'M', 'φ'), @@ -7095,6 +7182,10 @@ def _seg_67(): (0x1D747, 'M', 'σ'), 
(0x1D749, 'M', 'τ'), (0x1D74A, 'M', 'υ'), + ] + +def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D74B, 'M', 'φ'), (0x1D74C, 'M', 'χ'), (0x1D74D, 'M', 'ψ'), @@ -7144,11 +7235,6 @@ def _seg_67(): (0x1D779, 'M', 'κ'), (0x1D77A, 'M', 'λ'), (0x1D77B, 'M', 'μ'), - ] - -def _seg_68(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1D77C, 'M', 'ν'), (0x1D77D, 'M', 'ξ'), (0x1D77E, 'M', 'ο'), @@ -7200,6 +7286,10 @@ def _seg_68(): (0x1D7AD, 'M', 'δ'), (0x1D7AE, 'M', 'ε'), (0x1D7AF, 'M', 'ζ'), + ] + +def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D7B0, 'M', 'η'), (0x1D7B1, 'M', 'θ'), (0x1D7B2, 'M', 'ι'), @@ -7249,11 +7339,6 @@ def _seg_68(): (0x1D7E1, 'M', '9'), (0x1D7E2, 'M', '0'), (0x1D7E3, 'M', '1'), - ] - -def _seg_69(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1D7E4, 'M', '2'), (0x1D7E5, 'M', '3'), (0x1D7E6, 'M', '4'), @@ -7288,6 +7373,10 @@ def _seg_69(): (0x1DAA0, 'X'), (0x1DAA1, 'V'), (0x1DAB0, 'X'), + (0x1DF00, 'V'), + (0x1DF1F, 'X'), + (0x1DF25, 'V'), + (0x1DF2B, 'X'), (0x1E000, 'V'), (0x1E007, 'X'), (0x1E008, 'V'), @@ -7298,6 +7387,75 @@ def _seg_69(): (0x1E025, 'X'), (0x1E026, 'V'), (0x1E02B, 'X'), + (0x1E030, 'M', 'а'), + (0x1E031, 'M', 'б'), + (0x1E032, 'M', 'в'), + ] + +def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E033, 'M', 'г'), + (0x1E034, 'M', 'д'), + (0x1E035, 'M', 'е'), + (0x1E036, 'M', 'ж'), + (0x1E037, 'M', 'з'), + (0x1E038, 'M', 'и'), + (0x1E039, 'M', 'к'), + (0x1E03A, 'M', 'л'), + (0x1E03B, 'M', 'м'), + (0x1E03C, 'M', 'о'), + (0x1E03D, 'M', 'п'), + (0x1E03E, 'M', 'р'), + (0x1E03F, 'M', 'с'), + (0x1E040, 'M', 'т'), + (0x1E041, 'M', 'у'), + (0x1E042, 'M', 'ф'), + (0x1E043, 'M', 'х'), + (0x1E044, 'M', 'ц'), + (0x1E045, 'M', 'ч'), + (0x1E046, 'M', 'ш'), + (0x1E047, 'M', 'ы'), + (0x1E048, 'M', 'э'), + (0x1E049, 'M', 'ю'), + (0x1E04A, 'M', 'ꚉ'), + (0x1E04B, 'M', 'ә'), + (0x1E04C, 'M', 'і'), + (0x1E04D, 'M', 'ј'), + (0x1E04E, 'M', 'ө'), + (0x1E04F, 'M', 'ү'), + (0x1E050, 'M', 'ӏ'), + (0x1E051, 'M', 'а'), + (0x1E052, 'M', 'б'), + (0x1E053, 'M', 'в'), + (0x1E054, 'M', 'г'), + (0x1E055, 'M', 'д'), + (0x1E056, 'M', 'е'), + (0x1E057, 'M', 'ж'), + (0x1E058, 'M', 'з'), + (0x1E059, 'M', 'и'), + (0x1E05A, 'M', 'к'), + (0x1E05B, 'M', 'л'), + (0x1E05C, 'M', 'о'), + (0x1E05D, 'M', 'п'), + (0x1E05E, 'M', 'с'), + (0x1E05F, 'M', 'у'), + (0x1E060, 'M', 'ф'), + (0x1E061, 'M', 'х'), + (0x1E062, 'M', 'ц'), + (0x1E063, 'M', 'ч'), + (0x1E064, 'M', 'ш'), + (0x1E065, 'M', 'ъ'), + (0x1E066, 'M', 'ы'), + (0x1E067, 'M', 'ґ'), + (0x1E068, 'M', 'і'), + (0x1E069, 'M', 'ѕ'), + (0x1E06A, 'M', 'џ'), + (0x1E06B, 'M', 'ҫ'), + (0x1E06C, 'M', 'ꙑ'), + (0x1E06D, 'M', 'ұ'), + (0x1E06E, 'X'), + (0x1E08F, 'V'), + (0x1E090, 'X'), (0x1E100, 'V'), (0x1E12D, 'X'), (0x1E130, 'V'), @@ -7306,10 +7464,22 @@ def _seg_69(): (0x1E14A, 'X'), (0x1E14E, 'V'), (0x1E150, 'X'), + (0x1E290, 'V'), + (0x1E2AF, 'X'), (0x1E2C0, 'V'), (0x1E2FA, 'X'), (0x1E2FF, 'V'), (0x1E300, 'X'), + (0x1E4D0, 'V'), + (0x1E4FA, 'X'), + (0x1E7E0, 'V'), + (0x1E7E7, 'X'), + (0x1E7E8, 'V'), + (0x1E7EC, 'X'), + (0x1E7ED, 'V'), + (0x1E7EF, 'X'), + (0x1E7F0, 'V'), + (0x1E7FF, 'X'), (0x1E800, 'V'), (0x1E8C5, 'X'), (0x1E8C7, 'V'), @@ -7324,6 +7494,10 @@ def _seg_69(): (0x1E907, 'M', '𞤩'), (0x1E908, 'M', '𞤪'), (0x1E909, 'M', '𞤫'), + ] + +def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1E90A, 'M', '𞤬'), (0x1E90B, 'M', '𞤭'), (0x1E90C, 'M', '𞤮'), @@ 
-7354,11 +7528,6 @@ def _seg_69(): (0x1E95A, 'X'), (0x1E95E, 'V'), (0x1E960, 'X'), - ] - -def _seg_70(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1EC71, 'V'), (0x1ECB5, 'X'), (0x1ED01, 'V'), @@ -7429,6 +7598,10 @@ def _seg_70(): (0x1EE48, 'X'), (0x1EE49, 'M', 'ي'), (0x1EE4A, 'X'), + ] + +def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1EE4B, 'M', 'ل'), (0x1EE4C, 'X'), (0x1EE4D, 'M', 'ن'), @@ -7459,11 +7632,6 @@ def _seg_70(): (0x1EE68, 'M', 'ط'), (0x1EE69, 'M', 'ي'), (0x1EE6A, 'M', 'ك'), - ] - -def _seg_71(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1EE6B, 'X'), (0x1EE6C, 'M', 'م'), (0x1EE6D, 'M', 'ن'), @@ -7534,6 +7702,10 @@ def _seg_71(): (0x1EEB2, 'M', 'ق'), (0x1EEB3, 'M', 'ر'), (0x1EEB4, 'M', 'ش'), + ] + +def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1EEB5, 'M', 'ت'), (0x1EEB6, 'M', 'ث'), (0x1EEB7, 'M', 'خ'), @@ -7564,11 +7736,6 @@ def _seg_71(): (0x1F106, '3', '5,'), (0x1F107, '3', '6,'), (0x1F108, '3', '7,'), - ] - -def _seg_72(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1F109, '3', '8,'), (0x1F10A, '3', '9,'), (0x1F10B, 'V'), @@ -7639,6 +7806,10 @@ def _seg_72(): (0x1F150, 'V'), (0x1F16A, 'M', 'mc'), (0x1F16B, 'M', 'md'), + ] + +def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1F16C, 'M', 'mr'), (0x1F16D, 'V'), (0x1F190, 'M', 'dj'), @@ -7669,11 +7840,6 @@ def _seg_72(): (0x1F221, 'M', '終'), (0x1F222, 'M', '生'), (0x1F223, 'M', '販'), - ] - -def _seg_73(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1F224, 'M', '声'), (0x1F225, 'M', '吹'), (0x1F226, 'M', '演'), @@ -7716,16 +7882,18 @@ def _seg_73(): (0x1F266, 'X'), (0x1F300, 'V'), (0x1F6D8, 'X'), - (0x1F6E0, 'V'), + (0x1F6DC, 'V'), (0x1F6ED, 'X'), (0x1F6F0, 'V'), (0x1F6FD, 'X'), (0x1F700, 'V'), - (0x1F774, 'X'), - (0x1F780, 'V'), - (0x1F7D9, 'X'), + (0x1F777, 'X'), + (0x1F77B, 'V'), + (0x1F7DA, 'X'), (0x1F7E0, 'V'), (0x1F7EC, 'X'), + (0x1F7F0, 'V'), + (0x1F7F1, 'X'), (0x1F800, 'V'), (0x1F80C, 'X'), (0x1F810, 'V'), @@ -7739,27 +7907,27 @@ def _seg_73(): (0x1F8B0, 'V'), (0x1F8B2, 'X'), (0x1F900, 'V'), - (0x1F979, 'X'), - (0x1F97A, 'V'), - (0x1F9CC, 'X'), - (0x1F9CD, 'V'), (0x1FA54, 'X'), (0x1FA60, 'V'), (0x1FA6E, 'X'), + ] + +def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1FA70, 'V'), - (0x1FA75, 'X'), - (0x1FA78, 'V'), - (0x1FA7B, 'X'), + (0x1FA7D, 'X'), (0x1FA80, 'V'), - (0x1FA87, 'X'), + (0x1FA89, 'X'), (0x1FA90, 'V'), - (0x1FAA9, 'X'), - (0x1FAB0, 'V'), - (0x1FAB7, 'X'), - (0x1FAC0, 'V'), - (0x1FAC3, 'X'), - (0x1FAD0, 'V'), - (0x1FAD7, 'X'), + (0x1FABE, 'X'), + (0x1FABF, 'V'), + (0x1FAC6, 'X'), + (0x1FACE, 'V'), + (0x1FADC, 'X'), + (0x1FAE0, 'V'), + (0x1FAE9, 'X'), + (0x1FAF0, 'V'), + (0x1FAF9, 'X'), (0x1FB00, 'V'), (0x1FB93, 'X'), (0x1FB94, 'V'), @@ -7774,16 +7942,11 @@ def _seg_73(): (0x1FBF7, 'M', '7'), (0x1FBF8, 'M', '8'), (0x1FBF9, 'M', '9'), - ] - -def _seg_74(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x1FBFA, 'X'), (0x20000, 'V'), - (0x2A6DE, 'X'), + (0x2A6E0, 'X'), (0x2A700, 'V'), - (0x2B735, 'X'), + (0x2B73A, 'X'), (0x2B740, 'V'), (0x2B81E, 'X'), (0x2B820, 'V'), @@ -7851,6 +8014,10 @@ def _seg_74(): (0x2F83C, 'M', '咞'), (0x2F83D, 'M', '吸'), (0x2F83E, 'M', '呈'), + ] + +def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2F83F, 'M', '周'), (0x2F840, 'M', '咢'), 
(0x2F841, 'M', '哶'), @@ -7879,11 +8046,6 @@ def _seg_74(): (0x2F859, 'M', '𡓤'), (0x2F85A, 'M', '売'), (0x2F85B, 'M', '壷'), - ] - -def _seg_75(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x2F85C, 'M', '夆'), (0x2F85D, 'M', '多'), (0x2F85E, 'M', '夢'), @@ -7956,6 +8118,10 @@ def _seg_75(): (0x2F8A4, 'M', '𢛔'), (0x2F8A5, 'M', '惇'), (0x2F8A6, 'M', '慈'), + ] + +def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2F8A7, 'M', '慌'), (0x2F8A8, 'M', '慎'), (0x2F8A9, 'M', '慌'), @@ -7984,11 +8150,6 @@ def _seg_75(): (0x2F8C0, 'M', '揅'), (0x2F8C1, 'M', '掩'), (0x2F8C2, 'M', '㨮'), - ] - -def _seg_76(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x2F8C3, 'M', '摩'), (0x2F8C4, 'M', '摾'), (0x2F8C5, 'M', '撝'), @@ -8061,6 +8222,10 @@ def _seg_76(): (0x2F908, 'M', '港'), (0x2F909, 'M', '湮'), (0x2F90A, 'M', '㴳'), + ] + +def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2F90B, 'M', '滋'), (0x2F90C, 'M', '滇'), (0x2F90D, 'M', '𣻑'), @@ -8089,11 +8254,6 @@ def _seg_76(): (0x2F924, 'M', '犀'), (0x2F925, 'M', '犕'), (0x2F926, 'M', '𤜵'), - ] - -def _seg_77(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x2F927, 'M', '𤠔'), (0x2F928, 'M', '獺'), (0x2F929, 'M', '王'), @@ -8166,6 +8326,10 @@ def _seg_77(): (0x2F96F, 'M', '縂'), (0x2F970, 'M', '繅'), (0x2F971, 'M', '䌴'), + ] + +def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2F972, 'M', '𦈨'), (0x2F973, 'M', '𦉇'), (0x2F974, 'M', '䍙'), @@ -8194,11 +8358,6 @@ def _seg_77(): (0x2F98B, 'M', '舁'), (0x2F98C, 'M', '舄'), (0x2F98D, 'M', '辞'), - ] - -def _seg_78(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x2F98E, 'M', '䑫'), (0x2F98F, 'M', '芑'), (0x2F990, 'M', '芋'), @@ -8271,6 +8430,10 @@ def _seg_78(): (0x2F9D3, 'M', '𧲨'), (0x2F9D4, 'M', '貫'), (0x2F9D5, 'M', '賁'), + ] + +def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2F9D6, 'M', '贛'), (0x2F9D7, 'M', '起'), (0x2F9D8, 'M', '𧼯'), @@ -8299,11 +8462,6 @@ def _seg_78(): (0x2F9EF, 'M', '䦕'), (0x2F9F0, 'M', '閷'), (0x2F9F1, 'M', '𨵷'), - ] - -def _seg_79(): - # type: () -> List[Union[Tuple[int, str], Tuple[int, str, str]]] - return [ (0x2F9F2, 'M', '䧦'), (0x2F9F3, 'M', '雃'), (0x2F9F4, 'M', '嶲'), @@ -8350,6 +8508,8 @@ def _seg_79(): (0x2FA1E, 'X'), (0x30000, 'V'), (0x3134B, 'X'), + (0x31350, 'V'), + (0x323B0, 'X'), (0xE0100, 'I'), (0xE01F0, 'X'), ] @@ -8435,4 +8595,6 @@ def _seg_79(): + _seg_77() + _seg_78() + _seg_79() + + _seg_80() + + _seg_81() ) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] diff --git a/packages/rfc3986/__init__.py b/packages/rfc3986/__init__.py deleted file mode 100644 index a052299c7..000000000 --- a/packages/rfc3986/__init__.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -An implementation of semantics and validations described in RFC 3986. 
- -See http://rfc3986.readthedocs.io/ for detailed documentation. - -:copyright: (c) 2014 Rackspace -:license: Apache v2.0, see LICENSE for details -""" - -from .api import iri_reference -from .api import IRIReference -from .api import is_valid_uri -from .api import normalize_uri -from .api import uri_reference -from .api import URIReference -from .api import urlparse -from .parseresult import ParseResult - -__title__ = "rfc3986" -__author__ = "Ian Stapleton Cordasco" -__author_email__ = "graffatcolmingov@gmail.com" -__license__ = "Apache v2.0" -__copyright__ = "Copyright 2014 Rackspace; 2016 Ian Stapleton Cordasco" -__version__ = "1.5.0" - -__all__ = ( - "ParseResult", - "URIReference", - "IRIReference", - "is_valid_uri", - "normalize_uri", - "uri_reference", - "iri_reference", - "urlparse", - "__title__", - "__author__", - "__author_email__", - "__license__", - "__copyright__", - "__version__", -) diff --git a/packages/rfc3986/_mixin.py b/packages/rfc3986/_mixin.py deleted file mode 100644 index 46e200e2f..000000000 --- a/packages/rfc3986/_mixin.py +++ /dev/null @@ -1,373 +0,0 @@ -"""Module containing the implementation of the URIMixin class.""" -import warnings - -from . import exceptions as exc -from . import misc -from . import normalizers -from . import validators - - -class URIMixin(object): - """Mixin with all shared methods for URIs and IRIs.""" - - __hash__ = tuple.__hash__ - - def authority_info(self): - """Return a dictionary with the ``userinfo``, ``host``, and ``port``. - - If the authority is not valid, it will raise a - :class:`~rfc3986.exceptions.InvalidAuthority` Exception. - - :returns: - ``{'userinfo': 'username:password', 'host': 'www.example.com', - 'port': '80'}`` - :rtype: dict - :raises rfc3986.exceptions.InvalidAuthority: - If the authority is not ``None`` and can not be parsed. - """ - if not self.authority: - return {"userinfo": None, "host": None, "port": None} - - match = self._match_subauthority() - - if match is None: - # In this case, we have an authority that was parsed from the URI - # Reference, but it cannot be further parsed by our - # misc.SUBAUTHORITY_MATCHER. In this case it must not be a valid - # authority. - raise exc.InvalidAuthority(self.authority.encode(self.encoding)) - - # We had a match, now let's ensure that it is actually a valid host - # address if it is IPv4 - matches = match.groupdict() - host = matches.get("host") - - if ( - host - and misc.IPv4_MATCHER.match(host) - and not validators.valid_ipv4_host_address(host) - ): - # If we have a host, it appears to be IPv4 and it does not have - # valid bytes, it is an InvalidAuthority. - raise exc.InvalidAuthority(self.authority.encode(self.encoding)) - - return matches - - def _match_subauthority(self): - return misc.SUBAUTHORITY_MATCHER.match(self.authority) - - @property - def host(self): - """If present, a string representing the host.""" - try: - authority = self.authority_info() - except exc.InvalidAuthority: - return None - return authority["host"] - - @property - def port(self): - """If present, the port extracted from the authority.""" - try: - authority = self.authority_info() - except exc.InvalidAuthority: - return None - return authority["port"] - - @property - def userinfo(self): - """If present, the userinfo extracted from the authority.""" - try: - authority = self.authority_info() - except exc.InvalidAuthority: - return None - return authority["userinfo"] - - def is_absolute(self): - """Determine if this URI Reference is an absolute URI. 
- - See http://tools.ietf.org/html/rfc3986#section-4.3 for explanation. - - :returns: ``True`` if it is an absolute URI, ``False`` otherwise. - :rtype: bool - """ - return bool(misc.ABSOLUTE_URI_MATCHER.match(self.unsplit())) - - def is_valid(self, **kwargs): - """Determine if the URI is valid. - - .. deprecated:: 1.1.0 - - Use the :class:`~rfc3986.validators.Validator` object instead. - - :param bool require_scheme: Set to ``True`` if you wish to require the - presence of the scheme component. - :param bool require_authority: Set to ``True`` if you wish to require - the presence of the authority component. - :param bool require_path: Set to ``True`` if you wish to require the - presence of the path component. - :param bool require_query: Set to ``True`` if you wish to require the - presence of the query component. - :param bool require_fragment: Set to ``True`` if you wish to require - the presence of the fragment component. - :returns: ``True`` if the URI is valid. ``False`` otherwise. - :rtype: bool - """ - warnings.warn( - "Please use rfc3986.validators.Validator instead. " - "This method will be eventually removed.", - DeprecationWarning, - ) - validators = [ - (self.scheme_is_valid, kwargs.get("require_scheme", False)), - (self.authority_is_valid, kwargs.get("require_authority", False)), - (self.path_is_valid, kwargs.get("require_path", False)), - (self.query_is_valid, kwargs.get("require_query", False)), - (self.fragment_is_valid, kwargs.get("require_fragment", False)), - ] - return all(v(r) for v, r in validators) - - def authority_is_valid(self, require=False): - """Determine if the authority component is valid. - - .. deprecated:: 1.1.0 - - Use the :class:`~rfc3986.validators.Validator` object instead. - - :param bool require: - Set to ``True`` to require the presence of this component. - :returns: - ``True`` if the authority is valid. ``False`` otherwise. - :rtype: - bool - """ - warnings.warn( - "Please use rfc3986.validators.Validator instead. " - "This method will be eventually removed.", - DeprecationWarning, - ) - try: - self.authority_info() - except exc.InvalidAuthority: - return False - - return validators.authority_is_valid( - self.authority, - host=self.host, - require=require, - ) - - def scheme_is_valid(self, require=False): - """Determine if the scheme component is valid. - - .. deprecated:: 1.1.0 - - Use the :class:`~rfc3986.validators.Validator` object instead. - - :param str require: Set to ``True`` to require the presence of this - component. - :returns: ``True`` if the scheme is valid. ``False`` otherwise. - :rtype: bool - """ - warnings.warn( - "Please use rfc3986.validators.Validator instead. " - "This method will be eventually removed.", - DeprecationWarning, - ) - return validators.scheme_is_valid(self.scheme, require) - - def path_is_valid(self, require=False): - """Determine if the path component is valid. - - .. deprecated:: 1.1.0 - - Use the :class:`~rfc3986.validators.Validator` object instead. - - :param str require: Set to ``True`` to require the presence of this - component. - :returns: ``True`` if the path is valid. ``False`` otherwise. - :rtype: bool - """ - warnings.warn( - "Please use rfc3986.validators.Validator instead. " - "This method will be eventually removed.", - DeprecationWarning, - ) - return validators.path_is_valid(self.path, require) - - def query_is_valid(self, require=False): - """Determine if the query component is valid. - - .. deprecated:: 1.1.0 - - Use the :class:`~rfc3986.validators.Validator` object instead. 
- - :param str require: Set to ``True`` to require the presence of this - component. - :returns: ``True`` if the query is valid. ``False`` otherwise. - :rtype: bool - """ - warnings.warn( - "Please use rfc3986.validators.Validator instead. " - "This method will be eventually removed.", - DeprecationWarning, - ) - return validators.query_is_valid(self.query, require) - - def fragment_is_valid(self, require=False): - """Determine if the fragment component is valid. - - .. deprecated:: 1.1.0 - - Use the Validator object instead. - - :param str require: Set to ``True`` to require the presence of this - component. - :returns: ``True`` if the fragment is valid. ``False`` otherwise. - :rtype: bool - """ - warnings.warn( - "Please use rfc3986.validators.Validator instead. " - "This method will be eventually removed.", - DeprecationWarning, - ) - return validators.fragment_is_valid(self.fragment, require) - - def normalized_equality(self, other_ref): - """Compare this URIReference to another URIReference. - - :param URIReference other_ref: (required), The reference with which - we're comparing. - :returns: ``True`` if the references are equal, ``False`` otherwise. - :rtype: bool - """ - return tuple(self.normalize()) == tuple(other_ref.normalize()) - - def resolve_with(self, base_uri, strict=False): - """Use an absolute URI Reference to resolve this relative reference. - - Assuming this is a relative reference that you would like to resolve, - use the provided base URI to resolve it. - - See http://tools.ietf.org/html/rfc3986#section-5 for more information. - - :param base_uri: Either a string or URIReference. It must be an - absolute URI or it will raise an exception. - :returns: A new URIReference which is the result of resolving this - reference using ``base_uri``. - :rtype: :class:`URIReference` - :raises rfc3986.exceptions.ResolutionError: - If the ``base_uri`` is not an absolute URI. - """ - if not isinstance(base_uri, URIMixin): - base_uri = type(self).from_string(base_uri) - - if not base_uri.is_absolute(): - raise exc.ResolutionError(base_uri) - - # This is optional per - # http://tools.ietf.org/html/rfc3986#section-5.2.1 - base_uri = base_uri.normalize() - - # The reference we're resolving - resolving = self - - if not strict and resolving.scheme == base_uri.scheme: - resolving = resolving.copy_with(scheme=None) - - # http://tools.ietf.org/html/rfc3986#page-32 - if resolving.scheme is not None: - target = resolving.copy_with( - path=normalizers.normalize_path(resolving.path) - ) - else: - if resolving.authority is not None: - target = resolving.copy_with( - scheme=base_uri.scheme, - path=normalizers.normalize_path(resolving.path), - ) - else: - if resolving.path is None: - if resolving.query is not None: - query = resolving.query - else: - query = base_uri.query - target = resolving.copy_with( - scheme=base_uri.scheme, - authority=base_uri.authority, - path=base_uri.path, - query=query, - ) - else: - if resolving.path.startswith("/"): - path = normalizers.normalize_path(resolving.path) - else: - path = normalizers.normalize_path( - misc.merge_paths(base_uri, resolving.path) - ) - target = resolving.copy_with( - scheme=base_uri.scheme, - authority=base_uri.authority, - path=path, - query=resolving.query, - ) - return target - - def unsplit(self): - """Create a URI string from the components. - - :returns: The URI Reference reconstituted as a string. 
- :rtype: str - """ - # See http://tools.ietf.org/html/rfc3986#section-5.3 - result_list = [] - if self.scheme: - result_list.extend([self.scheme, ":"]) - if self.authority: - result_list.extend(["//", self.authority]) - if self.path: - result_list.append(self.path) - if self.query is not None: - result_list.extend(["?", self.query]) - if self.fragment is not None: - result_list.extend(["#", self.fragment]) - return "".join(result_list) - - def copy_with( - self, - scheme=misc.UseExisting, - authority=misc.UseExisting, - path=misc.UseExisting, - query=misc.UseExisting, - fragment=misc.UseExisting, - ): - """Create a copy of this reference with the new components. - - :param str scheme: - (optional) The scheme to use for the new reference. - :param str authority: - (optional) The authority to use for the new reference. - :param str path: - (optional) The path to use for the new reference. - :param str query: - (optional) The query to use for the new reference. - :param str fragment: - (optional) The fragment to use for the new reference. - :returns: - New URIReference with provided components. - :rtype: - URIReference - """ - attributes = { - "scheme": scheme, - "authority": authority, - "path": path, - "query": query, - "fragment": fragment, - } - for key, value in list(attributes.items()): - if value is misc.UseExisting: - del attributes[key] - uri = self._replace(**attributes) - uri.encoding = self.encoding - return uri diff --git a/packages/rfc3986/abnf_regexp.py b/packages/rfc3986/abnf_regexp.py deleted file mode 100644 index a2e7ee7ab..000000000 --- a/packages/rfc3986/abnf_regexp.py +++ /dev/null @@ -1,282 +0,0 @@ -# -*- coding: utf-8 -*- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Module for the regular expressions crafted from ABNF.""" - -import sys - -# https://tools.ietf.org/html/rfc3986#page-13 -GEN_DELIMS = GENERIC_DELIMITERS = ":/?#[]@" -GENERIC_DELIMITERS_SET = set(GENERIC_DELIMITERS) -# https://tools.ietf.org/html/rfc3986#page-13 -SUB_DELIMS = SUB_DELIMITERS = "!$&'()*+,;=" -SUB_DELIMITERS_SET = set(SUB_DELIMITERS) -# Escape the '*' for use in regular expressions -SUB_DELIMITERS_RE = r"!$&'()\*+,;=" -RESERVED_CHARS_SET = GENERIC_DELIMITERS_SET.union(SUB_DELIMITERS_SET) -ALPHA = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" -DIGIT = "0123456789" -# https://tools.ietf.org/html/rfc3986#section-2.3 -UNRESERVED = UNRESERVED_CHARS = ALPHA + DIGIT + r"._!-~" -UNRESERVED_CHARS_SET = set(UNRESERVED_CHARS) -NON_PCT_ENCODED_SET = RESERVED_CHARS_SET.union(UNRESERVED_CHARS_SET) -# We need to escape the '-' in this case: -UNRESERVED_RE = r"A-Za-z0-9._~\-" - -# Percent encoded character values -PERCENT_ENCODED = PCT_ENCODED = "%[A-Fa-f0-9]{2}" -PCHAR = "([" + UNRESERVED_RE + SUB_DELIMITERS_RE + ":@]|%s)" % PCT_ENCODED - -# NOTE(sigmavirus24): We're going to use more strict regular expressions -# than appear in Appendix B for scheme. This will prevent over-eager -# consuming of items that aren't schemes. 
-SCHEME_RE = "[a-zA-Z][a-zA-Z0-9+.-]*" -_AUTHORITY_RE = "[^\\\\/?#]*" -_PATH_RE = "[^?#]*" -_QUERY_RE = "[^#]*" -_FRAGMENT_RE = ".*" - -# Extracted from http://tools.ietf.org/html/rfc3986#appendix-B -COMPONENT_PATTERN_DICT = { - "scheme": SCHEME_RE, - "authority": _AUTHORITY_RE, - "path": _PATH_RE, - "query": _QUERY_RE, - "fragment": _FRAGMENT_RE, -} - -# See http://tools.ietf.org/html/rfc3986#appendix-B -# In this case, we name each of the important matches so we can use -# SRE_Match#groupdict to parse the values out if we so choose. This is also -# modified to ignore other matches that are not important to the parsing of -# the reference so we can also simply use SRE_Match#groups. -URL_PARSING_RE = ( - r"(?:(?P{scheme}):)?(?://(?P{authority}))?" - r"(?P{path})(?:\?(?P{query}))?" - r"(?:#(?P{fragment}))?" -).format(**COMPONENT_PATTERN_DICT) - - -# ######################### -# Authority Matcher Section -# ######################### - -# Host patterns, see: http://tools.ietf.org/html/rfc3986#section-3.2.2 -# The pattern for a regular name, e.g., www.google.com, api.github.com -REGULAR_NAME_RE = REG_NAME = "((?:{0}|[{1}])*)".format( - "%[0-9A-Fa-f]{2}", SUB_DELIMITERS_RE + UNRESERVED_RE -) -# The pattern for an IPv4 address, e.g., 192.168.255.255, 127.0.0.1, -IPv4_RE = r"([0-9]{1,3}\.){3}[0-9]{1,3}" -# Hexadecimal characters used in each piece of an IPv6 address -HEXDIG_RE = "[0-9A-Fa-f]{1,4}" -# Least-significant 32 bits of an IPv6 address -LS32_RE = "({hex}:{hex}|{ipv4})".format(hex=HEXDIG_RE, ipv4=IPv4_RE) -# Substitutions into the following patterns for IPv6 patterns defined -# http://tools.ietf.org/html/rfc3986#page-20 -_subs = {"hex": HEXDIG_RE, "ls32": LS32_RE} - -# Below: h16 = hexdig, see: https://tools.ietf.org/html/rfc5234 for details -# about ABNF (Augmented Backus-Naur Form) use in the comments -variations = [ - # 6( h16 ":" ) ls32 - "(%(hex)s:){6}%(ls32)s" % _subs, - # "::" 5( h16 ":" ) ls32 - "::(%(hex)s:){5}%(ls32)s" % _subs, - # [ h16 ] "::" 4( h16 ":" ) ls32 - "(%(hex)s)?::(%(hex)s:){4}%(ls32)s" % _subs, - # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 - "((%(hex)s:)?%(hex)s)?::(%(hex)s:){3}%(ls32)s" % _subs, - # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 - "((%(hex)s:){0,2}%(hex)s)?::(%(hex)s:){2}%(ls32)s" % _subs, - # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 - "((%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s" % _subs, - # [ *4( h16 ":" ) h16 ] "::" ls32 - "((%(hex)s:){0,4}%(hex)s)?::%(ls32)s" % _subs, - # [ *5( h16 ":" ) h16 ] "::" h16 - "((%(hex)s:){0,5}%(hex)s)?::%(hex)s" % _subs, - # [ *6( h16 ":" ) h16 ] "::" - "((%(hex)s:){0,6}%(hex)s)?::" % _subs, -] - -IPv6_RE = "(({0})|({1})|({2})|({3})|({4})|({5})|({6})|({7})|({8}))".format( - *variations -) - -IPv_FUTURE_RE = r"v[0-9A-Fa-f]+\.[%s]+" % ( - UNRESERVED_RE + SUB_DELIMITERS_RE + ":" -) - -# RFC 6874 Zone ID ABNF -ZONE_ID = "(?:[" + UNRESERVED_RE + "]|" + PCT_ENCODED + ")+" - -IPv6_ADDRZ_RFC4007_RE = IPv6_RE + "(?:(?:%25|%)" + ZONE_ID + ")?" -IPv6_ADDRZ_RE = IPv6_RE + "(?:%25" + ZONE_ID + ")?" 
- -IP_LITERAL_RE = r"\[({0}|{1})\]".format( - IPv6_ADDRZ_RFC4007_RE, - IPv_FUTURE_RE, -) - -# Pattern for matching the host piece of the authority -HOST_RE = HOST_PATTERN = "({0}|{1}|{2})".format( - REG_NAME, - IPv4_RE, - IP_LITERAL_RE, -) -USERINFO_RE = ( - "^([" + UNRESERVED_RE + SUB_DELIMITERS_RE + ":]|%s)+" % (PCT_ENCODED) -) -PORT_RE = "[0-9]{1,5}" - -# #################### -# Path Matcher Section -# #################### - -# See http://tools.ietf.org/html/rfc3986#section-3.3 for more information -# about the path patterns defined below. -segments = { - "segment": PCHAR + "*", - # Non-zero length segment - "segment-nz": PCHAR + "+", - # Non-zero length segment without ":" - "segment-nz-nc": PCHAR.replace(":", "") + "+", -} - -# Path types taken from Section 3.3 (linked above) -PATH_EMPTY = "^$" -PATH_ROOTLESS = "%(segment-nz)s(/%(segment)s)*" % segments -PATH_NOSCHEME = "%(segment-nz-nc)s(/%(segment)s)*" % segments -PATH_ABSOLUTE = "/(%s)?" % PATH_ROOTLESS -PATH_ABEMPTY = "(/%(segment)s)*" % segments -PATH_RE = "^(%s|%s|%s|%s|%s)$" % ( - PATH_ABEMPTY, - PATH_ABSOLUTE, - PATH_NOSCHEME, - PATH_ROOTLESS, - PATH_EMPTY, -) - -FRAGMENT_RE = QUERY_RE = ( - "^([/?:@" + UNRESERVED_RE + SUB_DELIMITERS_RE + "]|%s)*$" % PCT_ENCODED -) - -# ########################## -# Relative reference matcher -# ########################## - -# See http://tools.ietf.org/html/rfc3986#section-4.2 for details -RELATIVE_PART_RE = "(//%s%s|%s|%s|%s)" % ( - COMPONENT_PATTERN_DICT["authority"], - PATH_ABEMPTY, - PATH_ABSOLUTE, - PATH_NOSCHEME, - PATH_EMPTY, -) - -# See http://tools.ietf.org/html/rfc3986#section-3 for definition -HIER_PART_RE = "(//%s%s|%s|%s|%s)" % ( - COMPONENT_PATTERN_DICT["authority"], - PATH_ABEMPTY, - PATH_ABSOLUTE, - PATH_ROOTLESS, - PATH_EMPTY, -) - -# ############### -# IRIs / RFC 3987 -# ############### - -# Only wide-unicode gets the high-ranges of UCSCHAR -if sys.maxunicode > 0xFFFF: # pragma: no cover - IPRIVATE = u"\uE000-\uF8FF\U000F0000-\U000FFFFD\U00100000-\U0010FFFD" - UCSCHAR_RE = ( - u"\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF" - u"\U00010000-\U0001FFFD\U00020000-\U0002FFFD" - u"\U00030000-\U0003FFFD\U00040000-\U0004FFFD" - u"\U00050000-\U0005FFFD\U00060000-\U0006FFFD" - u"\U00070000-\U0007FFFD\U00080000-\U0008FFFD" - u"\U00090000-\U0009FFFD\U000A0000-\U000AFFFD" - u"\U000B0000-\U000BFFFD\U000C0000-\U000CFFFD" - u"\U000D0000-\U000DFFFD\U000E1000-\U000EFFFD" - ) -else: # pragma: no cover - IPRIVATE = u"\uE000-\uF8FF" - UCSCHAR_RE = u"\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF" - -IUNRESERVED_RE = u"A-Za-z0-9\\._~\\-" + UCSCHAR_RE -IPCHAR = u"([" + IUNRESERVED_RE + SUB_DELIMITERS_RE + u":@]|%s)" % PCT_ENCODED - -isegments = { - "isegment": IPCHAR + u"*", - # Non-zero length segment - "isegment-nz": IPCHAR + u"+", - # Non-zero length segment without ":" - "isegment-nz-nc": IPCHAR.replace(":", "") + u"+", -} - -IPATH_ROOTLESS = u"%(isegment-nz)s(/%(isegment)s)*" % isegments -IPATH_NOSCHEME = u"%(isegment-nz-nc)s(/%(isegment)s)*" % isegments -IPATH_ABSOLUTE = u"/(?:%s)?" 
% IPATH_ROOTLESS -IPATH_ABEMPTY = u"(?:/%(isegment)s)*" % isegments -IPATH_RE = u"^(?:%s|%s|%s|%s|%s)$" % ( - IPATH_ABEMPTY, - IPATH_ABSOLUTE, - IPATH_NOSCHEME, - IPATH_ROOTLESS, - PATH_EMPTY, -) - -IREGULAR_NAME_RE = IREG_NAME = u"(?:{0}|[{1}])*".format( - u"%[0-9A-Fa-f]{2}", SUB_DELIMITERS_RE + IUNRESERVED_RE -) - -IHOST_RE = IHOST_PATTERN = u"({0}|{1}|{2})".format( - IREG_NAME, - IPv4_RE, - IP_LITERAL_RE, -) - -IUSERINFO_RE = ( - u"^(?:[" + IUNRESERVED_RE + SUB_DELIMITERS_RE + u":]|%s)+" % (PCT_ENCODED) -) - -IFRAGMENT_RE = ( - u"^(?:[/?:@" - + IUNRESERVED_RE - + SUB_DELIMITERS_RE - + u"]|%s)*$" % PCT_ENCODED -) -IQUERY_RE = ( - u"^(?:[/?:@" - + IUNRESERVED_RE - + SUB_DELIMITERS_RE - + IPRIVATE - + u"]|%s)*$" % PCT_ENCODED -) - -IRELATIVE_PART_RE = u"(//%s%s|%s|%s|%s)" % ( - COMPONENT_PATTERN_DICT["authority"], - IPATH_ABEMPTY, - IPATH_ABSOLUTE, - IPATH_NOSCHEME, - PATH_EMPTY, -) - -IHIER_PART_RE = u"(//%s%s|%s|%s|%s)" % ( - COMPONENT_PATTERN_DICT["authority"], - IPATH_ABEMPTY, - IPATH_ABSOLUTE, - IPATH_ROOTLESS, - PATH_EMPTY, -) diff --git a/packages/rfc3986/api.py b/packages/rfc3986/api.py deleted file mode 100644 index 1e098b34f..000000000 --- a/packages/rfc3986/api.py +++ /dev/null @@ -1,106 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Module containing the simple and functional API for rfc3986. - -This module defines functions and provides access to the public attributes -and classes of rfc3986. -""" - -from .iri import IRIReference -from .parseresult import ParseResult -from .uri import URIReference - - -def uri_reference(uri, encoding="utf-8"): - """Parse a URI string into a URIReference. - - This is a convenience function. You could achieve the same end by using - ``URIReference.from_string(uri)``. - - :param str uri: The URI which needs to be parsed into a reference. - :param str encoding: The encoding of the string provided - :returns: A parsed URI - :rtype: :class:`URIReference` - """ - return URIReference.from_string(uri, encoding) - - -def iri_reference(iri, encoding="utf-8"): - """Parse a IRI string into an IRIReference. - - This is a convenience function. You could achieve the same end by using - ``IRIReference.from_string(iri)``. - - :param str iri: The IRI which needs to be parsed into a reference. - :param str encoding: The encoding of the string provided - :returns: A parsed IRI - :rtype: :class:`IRIReference` - """ - return IRIReference.from_string(iri, encoding) - - -def is_valid_uri(uri, encoding="utf-8", **kwargs): - """Determine if the URI given is valid. - - This is a convenience function. You could use either - ``uri_reference(uri).is_valid()`` or - ``URIReference.from_string(uri).is_valid()`` to achieve the same result. - - :param str uri: The URI to be validated. - :param str encoding: The encoding of the string provided - :param bool require_scheme: Set to ``True`` if you wish to require the - presence of the scheme component. 
- :param bool require_authority: Set to ``True`` if you wish to require the - presence of the authority component. - :param bool require_path: Set to ``True`` if you wish to require the - presence of the path component. - :param bool require_query: Set to ``True`` if you wish to require the - presence of the query component. - :param bool require_fragment: Set to ``True`` if you wish to require the - presence of the fragment component. - :returns: ``True`` if the URI is valid, ``False`` otherwise. - :rtype: bool - """ - return URIReference.from_string(uri, encoding).is_valid(**kwargs) - - -def normalize_uri(uri, encoding="utf-8"): - """Normalize the given URI. - - This is a convenience function. You could use either - ``uri_reference(uri).normalize().unsplit()`` or - ``URIReference.from_string(uri).normalize().unsplit()`` instead. - - :param str uri: The URI to be normalized. - :param str encoding: The encoding of the string provided - :returns: The normalized URI. - :rtype: str - """ - normalized_reference = URIReference.from_string(uri, encoding).normalize() - return normalized_reference.unsplit() - - -def urlparse(uri, encoding="utf-8"): - """Parse a given URI and return a ParseResult. - - This is a partial replacement of the standard library's urlparse function. - - :param str uri: The URI to be parsed. - :param str encoding: The encoding of the string provided. - :returns: A parsed URI - :rtype: :class:`~rfc3986.parseresult.ParseResult` - """ - return ParseResult.from_string(uri, encoding, strict=False) diff --git a/packages/rfc3986/builder.py b/packages/rfc3986/builder.py deleted file mode 100644 index 8fc178c6c..000000000 --- a/packages/rfc3986/builder.py +++ /dev/null @@ -1,389 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2017 Ian Stapleton Cordasco -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Module containing the logic for the URIBuilder object.""" -from . import compat -from . import normalizers -from . import uri -from . import uri_reference - - -class URIBuilder(object): - """Object to aid in building up a URI Reference from parts. - - .. note:: - - This object should be instantiated by the user, but it's recommended - that it is not provided with arguments. Instead, use the available - method to populate the fields. - - """ - - def __init__( - self, - scheme=None, - userinfo=None, - host=None, - port=None, - path=None, - query=None, - fragment=None, - ): - """Initialize our URI builder. 
- - :param str scheme: - (optional) - :param str userinfo: - (optional) - :param str host: - (optional) - :param int port: - (optional) - :param str path: - (optional) - :param str query: - (optional) - :param str fragment: - (optional) - """ - self.scheme = scheme - self.userinfo = userinfo - self.host = host - self.port = port - self.path = path - self.query = query - self.fragment = fragment - - def __repr__(self): - """Provide a convenient view of our builder object.""" - formatstr = ( - "URIBuilder(scheme={b.scheme}, userinfo={b.userinfo}, " - "host={b.host}, port={b.port}, path={b.path}, " - "query={b.query}, fragment={b.fragment})" - ) - return formatstr.format(b=self) - - @classmethod - def from_uri(cls, reference): - """Initialize the URI builder from another URI. - - Takes the given URI reference and creates a new URI builder instance - populated with the values from the reference. If given a string it will - try to convert it to a reference before constructing the builder. - """ - if not isinstance(reference, uri.URIReference): - reference = uri_reference(reference) - return cls( - scheme=reference.scheme, - userinfo=reference.userinfo, - host=reference.host, - port=reference.port, - path=reference.path, - query=reference.query, - fragment=reference.fragment, - ) - - def add_scheme(self, scheme): - """Add a scheme to our builder object. - - After normalizing, this will generate a new URIBuilder instance with - the specified scheme and all other attributes the same. - - .. code-block:: python - - >>> URIBuilder().add_scheme('HTTPS') - URIBuilder(scheme='https', userinfo=None, host=None, port=None, - path=None, query=None, fragment=None) - - """ - scheme = normalizers.normalize_scheme(scheme) - return URIBuilder( - scheme=scheme, - userinfo=self.userinfo, - host=self.host, - port=self.port, - path=self.path, - query=self.query, - fragment=self.fragment, - ) - - def add_credentials(self, username, password): - """Add credentials as the userinfo portion of the URI. - - .. code-block:: python - - >>> URIBuilder().add_credentials('root', 's3crete') - URIBuilder(scheme=None, userinfo='root:s3crete', host=None, - port=None, path=None, query=None, fragment=None) - - >>> URIBuilder().add_credentials('root', None) - URIBuilder(scheme=None, userinfo='root', host=None, - port=None, path=None, query=None, fragment=None) - """ - if username is None: - raise ValueError("Username cannot be None") - userinfo = normalizers.normalize_username(username) - - if password is not None: - userinfo = "{}:{}".format( - userinfo, - normalizers.normalize_password(password), - ) - - return URIBuilder( - scheme=self.scheme, - userinfo=userinfo, - host=self.host, - port=self.port, - path=self.path, - query=self.query, - fragment=self.fragment, - ) - - def add_host(self, host): - """Add hostname to the URI. - - .. code-block:: python - - >>> URIBuilder().add_host('google.com') - URIBuilder(scheme=None, userinfo=None, host='google.com', - port=None, path=None, query=None, fragment=None) - - """ - return URIBuilder( - scheme=self.scheme, - userinfo=self.userinfo, - host=normalizers.normalize_host(host), - port=self.port, - path=self.path, - query=self.query, - fragment=self.fragment, - ) - - def add_port(self, port): - """Add port to the URI. - - .. 
code-block:: python
-
-            >>> URIBuilder().add_port(80)
-            URIBuilder(scheme=None, userinfo=None, host=None, port='80',
-                path=None, query=None, fragment=None)
-
-            >>> URIBuilder().add_port(443)
-            URIBuilder(scheme=None, userinfo=None, host=None, port='443',
-                path=None, query=None, fragment=None)
-
-        """
-        port_int = int(port)
-        if port_int < 0:
-            raise ValueError(
-                "ports are not allowed to be negative. You provided {}".format(
-                    port_int,
-                )
-            )
-        if port_int > 65535:
-            raise ValueError(
-                "ports are not allowed to be larger than 65535. "
-                "You provided {}".format(
-                    port_int,
-                )
-            )
-
-        return URIBuilder(
-            scheme=self.scheme,
-            userinfo=self.userinfo,
-            host=self.host,
-            port="{}".format(port_int),
-            path=self.path,
-            query=self.query,
-            fragment=self.fragment,
-        )
-
-    def add_path(self, path):
-        """Add a path to the URI.
-
-        .. code-block:: python
-
-            >>> URIBuilder().add_path('sigmavirus24/rfc3986')
-            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
-                path='/sigmavirus24/rfc3986', query=None, fragment=None)
-
-            >>> URIBuilder().add_path('/checkout.php')
-            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
-                path='/checkout.php', query=None, fragment=None)
-
-        """
-        if not path.startswith("/"):
-            path = "/{}".format(path)
-
-        return URIBuilder(
-            scheme=self.scheme,
-            userinfo=self.userinfo,
-            host=self.host,
-            port=self.port,
-            path=normalizers.normalize_path(path),
-            query=self.query,
-            fragment=self.fragment,
-        )
-
-    def extend_path(self, path):
-        """Extend the existing path value with the provided value.
-
-        .. versionadded:: 1.5.0
-
-        .. code-block:: python
-
-            >>> URIBuilder(path="/users").extend_path("/sigmavirus24")
-            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
-                path='/users/sigmavirus24', query=None, fragment=None)
-
-            >>> URIBuilder(path="/users/").extend_path("/sigmavirus24")
-            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
-                path='/users/sigmavirus24', query=None, fragment=None)
-
-            >>> URIBuilder(path="/users/").extend_path("sigmavirus24")
-            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
-                path='/users/sigmavirus24', query=None, fragment=None)
-
-            >>> URIBuilder(path="/users").extend_path("sigmavirus24")
-            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
-                path='/users/sigmavirus24', query=None, fragment=None)
-
-        """
-        existing_path = self.path or ""
-        path = "{}/{}".format(existing_path.rstrip("/"), path.lstrip("/"))
-
-        return self.add_path(path)
-
-    def add_query_from(self, query_items):
-        """Generate and add a query from a dictionary or list of tuples.
-
-        .. code-block:: python
-
-            >>> URIBuilder().add_query_from({'a': 'b c'})
-            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
-                path=None, query='a=b+c', fragment=None)
-
-            >>> URIBuilder().add_query_from([('a', 'b c')])
-            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
-                path=None, query='a=b+c', fragment=None)
-
-        """
-        query = normalizers.normalize_query(compat.urlencode(query_items))
-
-        return URIBuilder(
-            scheme=self.scheme,
-            userinfo=self.userinfo,
-            host=self.host,
-            port=self.port,
-            path=self.path,
-            query=query,
-            fragment=self.fragment,
-        )
-
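Illustration: each add_* method returns a fresh URIBuilder, so a URL can be
assembled and rendered in one chained expression. A minimal sketch, assuming
rfc3986 is importable (host and query values are made up):

    >>> URIBuilder().add_host('example.com').add_path('search'
    ... ).add_query_from({'q': 'a b'}).finalize().unsplit()
    '//example.com/search?q=a+b'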
-    def extend_query_with(self, query_items):
-        """Extend the existing query string with the new query items.
-
-        .. versionadded:: 1.5.0
-
-        .. code-block:: python
-
-            >>> URIBuilder(query='a=b+c').extend_query_with({'a': 'b c'})
-            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
-                path=None, query='a=b+c&a=b+c', fragment=None)
-
-            >>> URIBuilder(query='a=b+c').extend_query_with([('a', 'b c')])
-            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
-                path=None, query='a=b+c&a=b+c', fragment=None)
-        """
-        original_query_items = compat.parse_qsl(self.query or "")
-        if not isinstance(query_items, list):
-            query_items = list(query_items.items())
-
-        return self.add_query_from(original_query_items + query_items)
-
-    def add_query(self, query):
-        """Add a pre-formatted query string to the URI.
-
-        .. code-block:: python
-
-            >>> URIBuilder().add_query('a=b&c=d')
-            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
-                path=None, query='a=b&c=d', fragment=None)
-
-        """
-        return URIBuilder(
-            scheme=self.scheme,
-            userinfo=self.userinfo,
-            host=self.host,
-            port=self.port,
-            path=self.path,
-            query=normalizers.normalize_query(query),
-            fragment=self.fragment,
-        )
-
-    def add_fragment(self, fragment):
-        """Add a fragment to the URI.
-
-        .. code-block:: python
-
-            >>> URIBuilder().add_fragment('section-2.6.1')
-            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
-                path=None, query=None, fragment='section-2.6.1')
-
-        """
-        return URIBuilder(
-            scheme=self.scheme,
-            userinfo=self.userinfo,
-            host=self.host,
-            port=self.port,
-            path=self.path,
-            query=self.query,
-            fragment=normalizers.normalize_fragment(fragment),
-        )
-
-    def finalize(self):
-        """Create a URIReference from our builder.
-
-        .. code-block:: python
-
-            >>> URIBuilder().add_scheme('https').add_host('github.com'
-            ... ).add_path('sigmavirus24/rfc3986').finalize().unsplit()
-            'https://github.com/sigmavirus24/rfc3986'
-
-            >>> URIBuilder().add_scheme('https').add_host('github.com'
-            ... ).add_path('sigmavirus24/rfc3986').add_credentials(
-            ... 'sigmavirus24', 'not-re@l').finalize().unsplit()
-            'https://sigmavirus24:not-re%40l@github.com/sigmavirus24/rfc3986'
-
-        """
-        return uri.URIReference(
-            self.scheme,
-            normalizers.normalize_authority(
-                (self.userinfo, self.host, self.port)
-            ),
-            self.path,
-            self.query,
-            self.fragment,
-        )
-
-    def geturl(self):
-        """Generate the URL from this builder.
-
-        .. versionadded:: 1.5.0
-
-        This is an alternative to calling :meth:`finalize` and keeping the
-        :class:`rfc3986.uri.URIReference` around.
-        """
-        return self.finalize().unsplit()
diff --git a/packages/rfc3986/compat.py b/packages/rfc3986/compat.py
deleted file mode 100644
index 83e5c784b..000000000
--- a/packages/rfc3986/compat.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2014 Rackspace
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
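Illustration: the byte/text coercion helpers this compat module defines
below, to_str and to_bytes, round-trip through the given encoding and pass
values of the target type through unchanged. A minimal sketch of the
expected behaviour:

    >>> to_bytes(u"caf\u00e9")
    b'caf\xc3\xa9'
    >>> to_str(b"caf\xc3\xa9")
    'café'
    >>> to_str("already text")
    'already text'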
-"""Compatibility module for Python 2 and 3 support.""" -import sys - -try: - from urllib.parse import quote as urlquote -except ImportError: # Python 2.x - from urllib import quote as urlquote - -try: - from urllib.parse import parse_qsl -except ImportError: # Python 2.x - from urlparse import parse_qsl - -try: - from urllib.parse import urlencode -except ImportError: # Python 2.x - from urllib import urlencode - -__all__ = ( - "to_bytes", - "to_str", - "urlquote", - "urlencode", - "parse_qsl", -) - -PY3 = (3, 0) <= sys.version_info < (4, 0) -PY2 = (2, 6) <= sys.version_info < (2, 8) - - -if PY3: - unicode = str # Python 3.x - - -def to_str(b, encoding="utf-8"): - """Ensure that b is text in the specified encoding.""" - if hasattr(b, "decode") and not isinstance(b, unicode): - b = b.decode(encoding) - return b - - -def to_bytes(s, encoding="utf-8"): - """Ensure that s is converted to bytes from the encoding.""" - if hasattr(s, "encode") and not isinstance(s, bytes): - s = s.encode(encoding) - return s diff --git a/packages/rfc3986/exceptions.py b/packages/rfc3986/exceptions.py deleted file mode 100644 index b117bc9ca..000000000 --- a/packages/rfc3986/exceptions.py +++ /dev/null @@ -1,124 +0,0 @@ -# -*- coding: utf-8 -*- -"""Exceptions module for rfc3986.""" - -from . import compat - - -class RFC3986Exception(Exception): - """Base class for all rfc3986 exception classes.""" - - pass - - -class InvalidAuthority(RFC3986Exception): - """Exception when the authority string is invalid.""" - - def __init__(self, authority): - """Initialize the exception with the invalid authority.""" - super(InvalidAuthority, self).__init__( - u"The authority ({0}) is not valid.".format( - compat.to_str(authority) - ) - ) - - -class InvalidPort(RFC3986Exception): - """Exception when the port is invalid.""" - - def __init__(self, port): - """Initialize the exception with the invalid port.""" - super(InvalidPort, self).__init__( - 'The port ("{0}") is not valid.'.format(port) - ) - - -class ResolutionError(RFC3986Exception): - """Exception to indicate a failure to resolve a URI.""" - - def __init__(self, uri): - """Initialize the error with the failed URI.""" - super(ResolutionError, self).__init__( - "{0} is not an absolute URI.".format(uri.unsplit()) - ) - - -class ValidationError(RFC3986Exception): - """Exception raised during Validation of a URI.""" - - pass - - -class MissingComponentError(ValidationError): - """Exception raised when a required component is missing.""" - - def __init__(self, uri, *component_names): - """Initialize the error with the missing component name.""" - verb = "was" - if len(component_names) > 1: - verb = "were" - - self.uri = uri - self.components = sorted(component_names) - components = ", ".join(self.components) - super(MissingComponentError, self).__init__( - "{} {} required but missing".format(components, verb), - uri, - self.components, - ) - - -class UnpermittedComponentError(ValidationError): - """Exception raised when a component has an unpermitted value.""" - - def __init__(self, component_name, component_value, allowed_values): - """Initialize the error with the unpermitted component.""" - super(UnpermittedComponentError, self).__init__( - "{} was required to be one of {!r} but was {!r}".format( - component_name, - list(sorted(allowed_values)), - component_value, - ), - component_name, - component_value, - allowed_values, - ) - self.component_name = component_name - self.component_value = component_value - self.allowed_values = allowed_values - - -class 
PasswordForbidden(ValidationError): - """Exception raised when a URL has a password in the userinfo section.""" - - def __init__(self, uri): - """Initialize the error with the URI that failed validation.""" - unsplit = getattr(uri, "unsplit", lambda: uri) - super(PasswordForbidden, self).__init__( - '"{}" contained a password when validation forbade it'.format( - unsplit() - ) - ) - self.uri = uri - - -class InvalidComponentsError(ValidationError): - """Exception raised when one or more components are invalid.""" - - def __init__(self, uri, *component_names): - """Initialize the error with the invalid component name(s).""" - verb = "was" - if len(component_names) > 1: - verb = "were" - - self.uri = uri - self.components = sorted(component_names) - components = ", ".join(self.components) - super(InvalidComponentsError, self).__init__( - "{} {} found to be invalid".format(components, verb), - uri, - self.components, - ) - - -class MissingDependencyError(RFC3986Exception): - """Exception raised when an IRI is encoded without the 'idna' module.""" diff --git a/packages/rfc3986/iri.py b/packages/rfc3986/iri.py deleted file mode 100644 index 540aa7bc2..000000000 --- a/packages/rfc3986/iri.py +++ /dev/null @@ -1,162 +0,0 @@ -"""Module containing the implementation of the IRIReference class.""" -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Copyright (c) 2015 Ian Stapleton Cordasco -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from collections import namedtuple - -from . import compat -from . import exceptions -from . import misc -from . import normalizers -from . import uri - - -try: - import idna -except ImportError: # pragma: no cover - idna = None - - -class IRIReference( - namedtuple("IRIReference", misc.URI_COMPONENTS), uri.URIMixin -): - """Immutable object representing a parsed IRI Reference. - - Can be encoded into an URIReference object via the procedure - specified in RFC 3987 Section 3.1 - - .. note:: - The IRI submodule is a new interface and may possibly change in - the future. Check for changes to the interface when upgrading. 
- """ - - slots = () - - def __new__( - cls, scheme, authority, path, query, fragment, encoding="utf-8" - ): - """Create a new IRIReference.""" - ref = super(IRIReference, cls).__new__( - cls, - scheme or None, - authority or None, - path or None, - query, - fragment, - ) - ref.encoding = encoding - return ref - - def __eq__(self, other): - """Compare this reference to another.""" - other_ref = other - if isinstance(other, tuple): - other_ref = self.__class__(*other) - elif not isinstance(other, IRIReference): - try: - other_ref = self.__class__.from_string(other) - except TypeError: - raise TypeError( - "Unable to compare {0}() to {1}()".format( - type(self).__name__, type(other).__name__ - ) - ) - - # See http://tools.ietf.org/html/rfc3986#section-6.2 - return tuple(self) == tuple(other_ref) - - def _match_subauthority(self): - return misc.ISUBAUTHORITY_MATCHER.match(self.authority) - - @classmethod - def from_string(cls, iri_string, encoding="utf-8"): - """Parse a IRI reference from the given unicode IRI string. - - :param str iri_string: Unicode IRI to be parsed into a reference. - :param str encoding: The encoding of the string provided - :returns: :class:`IRIReference` or subclass thereof - """ - iri_string = compat.to_str(iri_string, encoding) - - split_iri = misc.IRI_MATCHER.match(iri_string).groupdict() - return cls( - split_iri["scheme"], - split_iri["authority"], - normalizers.encode_component(split_iri["path"], encoding), - normalizers.encode_component(split_iri["query"], encoding), - normalizers.encode_component(split_iri["fragment"], encoding), - encoding, - ) - - def encode(self, idna_encoder=None): # noqa: C901 - """Encode an IRIReference into a URIReference instance. - - If the ``idna`` module is installed or the ``rfc3986[idna]`` - extra is used then unicode characters in the IRI host - component will be encoded with IDNA2008. - - :param idna_encoder: - Function that encodes each part of the host component - If not given will raise an exception if the IRI - contains a host component. - :rtype: uri.URIReference - :returns: A URI reference - """ - authority = self.authority - if authority: - if idna_encoder is None: - if idna is None: # pragma: no cover - raise exceptions.MissingDependencyError( - "Could not import the 'idna' module " - "and the IRI hostname requires encoding" - ) - - def idna_encoder(name): - if any(ord(c) > 128 for c in name): - try: - return idna.encode( - name.lower(), strict=True, std3_rules=True - ) - except idna.IDNAError: - raise exceptions.InvalidAuthority(self.authority) - return name - - authority = "" - if self.host: - authority = ".".join( - [ - compat.to_str(idna_encoder(part)) - for part in self.host.split(".") - ] - ) - - if self.userinfo is not None: - authority = ( - normalizers.encode_component(self.userinfo, self.encoding) - + "@" - + authority - ) - - if self.port is not None: - authority += ":" + str(self.port) - - return uri.URIReference( - self.scheme, - authority, - path=self.path, - query=self.query, - fragment=self.fragment, - encoding=self.encoding, - ) diff --git a/packages/rfc3986/misc.py b/packages/rfc3986/misc.py deleted file mode 100644 index 338b18794..000000000 --- a/packages/rfc3986/misc.py +++ /dev/null @@ -1,135 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
diff --git a/packages/rfc3986/misc.py b/packages/rfc3986/misc.py
deleted file mode 100644
index 338b18794..000000000
--- a/packages/rfc3986/misc.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2014 Rackspace
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Module containing compiled regular expressions and constants.
-
-This module contains important constants, patterns, and compiled regular
-expressions for parsing and validating URIs and their components.
-"""
-
-import re
-
-from . import abnf_regexp
-
-# These are enumerated for the named tuple used as a superclass of
-# URIReference
-URI_COMPONENTS = ["scheme", "authority", "path", "query", "fragment"]
-
-important_characters = {
-    "generic_delimiters": abnf_regexp.GENERIC_DELIMITERS,
-    "sub_delimiters": abnf_regexp.SUB_DELIMITERS,
-    # We need to escape the '*' in this case
-    "re_sub_delimiters": abnf_regexp.SUB_DELIMITERS_RE,
-    "unreserved_chars": abnf_regexp.UNRESERVED_CHARS,
-    # We need to escape the '-' in this case:
-    "re_unreserved": abnf_regexp.UNRESERVED_RE,
-}
-
-# For details about delimiters and reserved characters, see:
-# http://tools.ietf.org/html/rfc3986#section-2.2
-GENERIC_DELIMITERS = abnf_regexp.GENERIC_DELIMITERS_SET
-SUB_DELIMITERS = abnf_regexp.SUB_DELIMITERS_SET
-RESERVED_CHARS = abnf_regexp.RESERVED_CHARS_SET
-# For details about unreserved characters, see:
-# http://tools.ietf.org/html/rfc3986#section-2.3
-UNRESERVED_CHARS = abnf_regexp.UNRESERVED_CHARS_SET
-NON_PCT_ENCODED = abnf_regexp.NON_PCT_ENCODED_SET
-
-URI_MATCHER = re.compile(abnf_regexp.URL_PARSING_RE)
-
-SUBAUTHORITY_MATCHER = re.compile(
-    (
-        "^(?:(?P<userinfo>{0})@)?"  # userinfo
-        "(?P<host>{1})"  # host
-        ":?(?P<port>{2})?$"  # port
-    ).format(
-        abnf_regexp.USERINFO_RE, abnf_regexp.HOST_PATTERN, abnf_regexp.PORT_RE
-    )
-)
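Illustration: the named groups in SUBAUTHORITY_MATCHER are what the rest of
the package relies on when splitting an authority into its sub-components.
A minimal sketch, assuming the module is importable as rfc3986.misc:

    >>> from rfc3986 import misc
    >>> m = misc.SUBAUTHORITY_MATCHER.match("user:pass@example.com:443")
    >>> m.groupdict()
    {'userinfo': 'user:pass', 'host': 'example.com', 'port': '443'}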
-
-
-HOST_MATCHER = re.compile("^" + abnf_regexp.HOST_RE + "$")
-IPv4_MATCHER = re.compile("^" + abnf_regexp.IPv4_RE + "$")
-IPv6_MATCHER = re.compile(r"^\[" + abnf_regexp.IPv6_ADDRZ_RFC4007_RE + r"\]$")
-
-# Used by host validator
-IPv6_NO_RFC4007_MATCHER = re.compile(
-    r"^\[%s\]$" % (abnf_regexp.IPv6_ADDRZ_RE)
-)
-
-# Matcher used to validate path components
-PATH_MATCHER = re.compile(abnf_regexp.PATH_RE)
-
-
-# ##################################
-# Query and Fragment Matcher Section
-# ##################################
-
-QUERY_MATCHER = re.compile(abnf_regexp.QUERY_RE)
-
-FRAGMENT_MATCHER = QUERY_MATCHER
-
-# Scheme validation, see: http://tools.ietf.org/html/rfc3986#section-3.1
-SCHEME_MATCHER = re.compile("^{0}$".format(abnf_regexp.SCHEME_RE))
-
-RELATIVE_REF_MATCHER = re.compile(
-    r"^%s(\?%s)?(#%s)?$"
-    % (
-        abnf_regexp.RELATIVE_PART_RE,
-        abnf_regexp.QUERY_RE,
-        abnf_regexp.FRAGMENT_RE,
-    )
-)
-
-# See http://tools.ietf.org/html/rfc3986#section-4.3
-ABSOLUTE_URI_MATCHER = re.compile(
-    r"^%s:%s(\?%s)?$"
-    % (
-        abnf_regexp.COMPONENT_PATTERN_DICT["scheme"],
-        abnf_regexp.HIER_PART_RE,
-        abnf_regexp.QUERY_RE[1:-1],
-    )
-)
-
-# ###############
-# IRIs / RFC 3987
-# ###############
-
-IRI_MATCHER = re.compile(abnf_regexp.URL_PARSING_RE, re.UNICODE)
-
-ISUBAUTHORITY_MATCHER = re.compile(
-    (
-        u"^(?:(?P<userinfo>{0})@)?"  # iuserinfo
-        u"(?P<host>{1})"  # ihost
-        u":?(?P<port>{2})?$"  # port
-    ).format(
-        abnf_regexp.IUSERINFO_RE, abnf_regexp.IHOST_RE, abnf_regexp.PORT_RE
-    ),
-    re.UNICODE,
-)
-
-
-# Path merger as defined in http://tools.ietf.org/html/rfc3986#section-5.2.3
-def merge_paths(base_uri, relative_path):
-    """Merge a base URI's path with a relative URI's path."""
-    if base_uri.path is None and base_uri.authority is not None:
-        return "/" + relative_path
-    else:
-        path = base_uri.path or ""
-        index = path.rfind("/")
-        return path[:index] + "/" + relative_path
-
-
-UseExisting = object()
diff --git a/packages/rfc3986/normalizers.py b/packages/rfc3986/normalizers.py
deleted file mode 100644
index 0d702b6d4..000000000
--- a/packages/rfc3986/normalizers.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2014 Rackspace
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Module with functions to normalize components."""
-import re
-
-from . import compat
-from . import misc
-
-
-def normalize_scheme(scheme):
-    """Normalize the scheme component."""
-    return scheme.lower()
-
-
-def normalize_authority(authority):
-    """Normalize an authority tuple to a string."""
-    userinfo, host, port = authority
-    result = ""
-    if userinfo:
-        result += normalize_percent_characters(userinfo) + "@"
-    if host:
-        result += normalize_host(host)
-    if port:
-        result += ":" + port
-    return result
-
-
-def normalize_username(username):
-    """Normalize a username to make it safe to include in userinfo."""
-    return compat.urlquote(username)
-
-
-def normalize_password(password):
-    """Normalize a password to make safe for userinfo."""
-    return compat.urlquote(password)
-
-
-def normalize_host(host):
-    """Normalize a host string."""
-    if misc.IPv6_MATCHER.match(host):
-        percent = host.find("%")
-        if percent != -1:
-            percent_25 = host.find("%25")
-
-            # Replace RFC 4007 IPv6 Zone ID delimiter '%' with '%25'
-            # from RFC 6874. If the host is '[<IPv6 addr>%25]' then we
-            # assume RFC 4007 and normalize to '[<IPv6 addr>%2525]'
-            if (
-                percent_25 == -1
-                or percent < percent_25
-                or (percent == percent_25 and percent_25 == len(host) - 4)
-            ):
-                host = host.replace("%", "%25", 1)
-
-            # Don't normalize the casing of the Zone ID
-            return host[:percent].lower() + host[percent:]
-
-    return host.lower()
-
-
-def normalize_path(path):
-    """Normalize the path string."""
-    if not path:
-        return path
-
-    path = normalize_percent_characters(path)
-    return remove_dot_segments(path)
-
-
-def normalize_query(query):
-    """Normalize the query string."""
-    if not query:
-        return query
-    return normalize_percent_characters(query)
-
-
-def normalize_fragment(fragment):
-    """Normalize the fragment string."""
-    if not fragment:
-        return fragment
-    return normalize_percent_characters(fragment)
-
-
-PERCENT_MATCHER = re.compile("%[A-Fa-f0-9]{2}")
-
-
-def normalize_percent_characters(s):
-    """All percent characters should be upper-cased.
-
-    For example, ``"%3afoo%DF%ab"`` should be turned into ``"%3Afoo%DF%AB"``.
- """ - matches = set(PERCENT_MATCHER.findall(s)) - for m in matches: - if not m.isupper(): - s = s.replace(m, m.upper()) - return s - - -def remove_dot_segments(s): - """Remove dot segments from the string. - - See also Section 5.2.4 of :rfc:`3986`. - """ - # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code - segments = s.split("/") # Turn the path into a list of segments - output = [] # Initialize the variable to use to store output - - for segment in segments: - # '.' is the current directory, so ignore it, it is superfluous - if segment == ".": - continue - # Anything other than '..', should be appended to the output - elif segment != "..": - output.append(segment) - # In this case segment == '..', if we can, we should pop the last - # element - elif output: - output.pop() - - # If the path starts with '/' and the output is empty or the first string - # is non-empty - if s.startswith("/") and (not output or output[0]): - output.insert(0, "") - - # If the path starts with '/.' or '/..' ensure we add one more empty - # string to add a trailing '/' - if s.endswith(("/.", "/..")): - output.append("") - - return "/".join(output) - - -def encode_component(uri_component, encoding): - """Encode the specific component in the provided encoding.""" - if uri_component is None: - return uri_component - - # Try to see if the component we're encoding is already percent-encoded - # so we can skip all '%' characters but still encode all others. - percent_encodings = len( - PERCENT_MATCHER.findall(compat.to_str(uri_component, encoding)) - ) - - uri_bytes = compat.to_bytes(uri_component, encoding) - is_percent_encoded = percent_encodings == uri_bytes.count(b"%") - - encoded_uri = bytearray() - - for i in range(0, len(uri_bytes)): - # Will return a single character bytestring on both Python 2 & 3 - byte = uri_bytes[i : i + 1] - byte_ord = ord(byte) - if (is_percent_encoded and byte == b"%") or ( - byte_ord < 128 and byte.decode() in misc.NON_PCT_ENCODED - ): - encoded_uri.extend(byte) - continue - encoded_uri.extend("%{0:02x}".format(byte_ord).encode().upper()) - - return encoded_uri.decode(encoding) diff --git a/packages/rfc3986/parseresult.py b/packages/rfc3986/parseresult.py deleted file mode 100644 index 8887e8f11..000000000 --- a/packages/rfc3986/parseresult.py +++ /dev/null @@ -1,479 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2015 Ian Stapleton Cordasco -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Module containing the urlparse compatibility logic.""" -from collections import namedtuple - -from . import compat -from . import exceptions -from . import misc -from . import normalizers -from . import uri - -__all__ = ("ParseResult", "ParseResultBytes") - -PARSED_COMPONENTS = ( - "scheme", - "userinfo", - "host", - "port", - "path", - "query", - "fragment", -) - - -class ParseResultMixin(object): - def _generate_authority(self, attributes): - # I swear I did not align the comparisons below. That's just how they - # happened to align based on pep8 and attribute lengths. 
- userinfo, host, port = ( - attributes[p] for p in ("userinfo", "host", "port") - ) - if ( - self.userinfo != userinfo - or self.host != host - or self.port != port - ): - if port: - port = "{0}".format(port) - return normalizers.normalize_authority( - ( - compat.to_str(userinfo, self.encoding), - compat.to_str(host, self.encoding), - port, - ) - ) - if isinstance(self.authority, bytes): - return self.authority.decode("utf-8") - return self.authority - - def geturl(self): - """Shim to match the standard library method.""" - return self.unsplit() - - @property - def hostname(self): - """Shim to match the standard library.""" - return self.host - - @property - def netloc(self): - """Shim to match the standard library.""" - return self.authority - - @property - def params(self): - """Shim to match the standard library.""" - return self.query - - -class ParseResult( - namedtuple("ParseResult", PARSED_COMPONENTS), ParseResultMixin -): - """Implementation of urlparse compatibility class. - - This uses the URIReference logic to handle compatibility with the - urlparse.ParseResult class. - """ - - slots = () - - def __new__( - cls, - scheme, - userinfo, - host, - port, - path, - query, - fragment, - uri_ref, - encoding="utf-8", - ): - """Create a new ParseResult.""" - parse_result = super(ParseResult, cls).__new__( - cls, - scheme or None, - userinfo or None, - host, - port or None, - path or None, - query, - fragment, - ) - parse_result.encoding = encoding - parse_result.reference = uri_ref - return parse_result - - @classmethod - def from_parts( - cls, - scheme=None, - userinfo=None, - host=None, - port=None, - path=None, - query=None, - fragment=None, - encoding="utf-8", - ): - """Create a ParseResult instance from its parts.""" - authority = "" - if userinfo is not None: - authority += userinfo + "@" - if host is not None: - authority += host - if port is not None: - authority += ":{0}".format(port) - uri_ref = uri.URIReference( - scheme=scheme, - authority=authority, - path=path, - query=query, - fragment=fragment, - encoding=encoding, - ).normalize() - userinfo, host, port = authority_from(uri_ref, strict=True) - return cls( - scheme=uri_ref.scheme, - userinfo=userinfo, - host=host, - port=port, - path=uri_ref.path, - query=uri_ref.query, - fragment=uri_ref.fragment, - uri_ref=uri_ref, - encoding=encoding, - ) - - @classmethod - def from_string( - cls, uri_string, encoding="utf-8", strict=True, lazy_normalize=True - ): - """Parse a URI from the given unicode URI string. - - :param str uri_string: Unicode URI to be parsed into a reference. - :param str encoding: The encoding of the string provided - :param bool strict: Parse strictly according to :rfc:`3986` if True. - If False, parse similarly to the standard library's urlparse - function. 
- :returns: :class:`ParseResult` or subclass thereof - """ - reference = uri.URIReference.from_string(uri_string, encoding) - if not lazy_normalize: - reference = reference.normalize() - userinfo, host, port = authority_from(reference, strict) - - return cls( - scheme=reference.scheme, - userinfo=userinfo, - host=host, - port=port, - path=reference.path, - query=reference.query, - fragment=reference.fragment, - uri_ref=reference, - encoding=encoding, - ) - - @property - def authority(self): - """Return the normalized authority.""" - return self.reference.authority - - def copy_with( - self, - scheme=misc.UseExisting, - userinfo=misc.UseExisting, - host=misc.UseExisting, - port=misc.UseExisting, - path=misc.UseExisting, - query=misc.UseExisting, - fragment=misc.UseExisting, - ): - """Create a copy of this instance replacing with specified parts.""" - attributes = zip( - PARSED_COMPONENTS, - (scheme, userinfo, host, port, path, query, fragment), - ) - attrs_dict = {} - for name, value in attributes: - if value is misc.UseExisting: - value = getattr(self, name) - attrs_dict[name] = value - authority = self._generate_authority(attrs_dict) - ref = self.reference.copy_with( - scheme=attrs_dict["scheme"], - authority=authority, - path=attrs_dict["path"], - query=attrs_dict["query"], - fragment=attrs_dict["fragment"], - ) - return ParseResult(uri_ref=ref, encoding=self.encoding, **attrs_dict) - - def encode(self, encoding=None): - """Convert to an instance of ParseResultBytes.""" - encoding = encoding or self.encoding - attrs = dict( - zip( - PARSED_COMPONENTS, - ( - attr.encode(encoding) if hasattr(attr, "encode") else attr - for attr in self - ), - ) - ) - return ParseResultBytes( - uri_ref=self.reference, encoding=encoding, **attrs - ) - - def unsplit(self, use_idna=False): - """Create a URI string from the components. - - :returns: The parsed URI reconstituted as a string. 
- :rtype: str - """ - parse_result = self - if use_idna and self.host: - hostbytes = self.host.encode("idna") - host = hostbytes.decode(self.encoding) - parse_result = self.copy_with(host=host) - return parse_result.reference.unsplit() - - -class ParseResultBytes( - namedtuple("ParseResultBytes", PARSED_COMPONENTS), ParseResultMixin -): - """Compatibility shim for the urlparse.ParseResultBytes object.""" - - def __new__( - cls, - scheme, - userinfo, - host, - port, - path, - query, - fragment, - uri_ref, - encoding="utf-8", - lazy_normalize=True, - ): - """Create a new ParseResultBytes instance.""" - parse_result = super(ParseResultBytes, cls).__new__( - cls, - scheme or None, - userinfo or None, - host, - port or None, - path or None, - query or None, - fragment or None, - ) - parse_result.encoding = encoding - parse_result.reference = uri_ref - parse_result.lazy_normalize = lazy_normalize - return parse_result - - @classmethod - def from_parts( - cls, - scheme=None, - userinfo=None, - host=None, - port=None, - path=None, - query=None, - fragment=None, - encoding="utf-8", - lazy_normalize=True, - ): - """Create a ParseResult instance from its parts.""" - authority = "" - if userinfo is not None: - authority += userinfo + "@" - if host is not None: - authority += host - if port is not None: - authority += ":{0}".format(int(port)) - uri_ref = uri.URIReference( - scheme=scheme, - authority=authority, - path=path, - query=query, - fragment=fragment, - encoding=encoding, - ) - if not lazy_normalize: - uri_ref = uri_ref.normalize() - to_bytes = compat.to_bytes - userinfo, host, port = authority_from(uri_ref, strict=True) - return cls( - scheme=to_bytes(scheme, encoding), - userinfo=to_bytes(userinfo, encoding), - host=to_bytes(host, encoding), - port=port, - path=to_bytes(path, encoding), - query=to_bytes(query, encoding), - fragment=to_bytes(fragment, encoding), - uri_ref=uri_ref, - encoding=encoding, - lazy_normalize=lazy_normalize, - ) - - @classmethod - def from_string( - cls, uri_string, encoding="utf-8", strict=True, lazy_normalize=True - ): - """Parse a URI from the given unicode URI string. - - :param str uri_string: Unicode URI to be parsed into a reference. - :param str encoding: The encoding of the string provided - :param bool strict: Parse strictly according to :rfc:`3986` if True. - If False, parse similarly to the standard library's urlparse - function. 
- :returns: :class:`ParseResultBytes` or subclass thereof - """ - reference = uri.URIReference.from_string(uri_string, encoding) - if not lazy_normalize: - reference = reference.normalize() - userinfo, host, port = authority_from(reference, strict) - - to_bytes = compat.to_bytes - return cls( - scheme=to_bytes(reference.scheme, encoding), - userinfo=to_bytes(userinfo, encoding), - host=to_bytes(host, encoding), - port=port, - path=to_bytes(reference.path, encoding), - query=to_bytes(reference.query, encoding), - fragment=to_bytes(reference.fragment, encoding), - uri_ref=reference, - encoding=encoding, - lazy_normalize=lazy_normalize, - ) - - @property - def authority(self): - """Return the normalized authority.""" - return self.reference.authority.encode(self.encoding) - - def copy_with( - self, - scheme=misc.UseExisting, - userinfo=misc.UseExisting, - host=misc.UseExisting, - port=misc.UseExisting, - path=misc.UseExisting, - query=misc.UseExisting, - fragment=misc.UseExisting, - lazy_normalize=True, - ): - """Create a copy of this instance replacing with specified parts.""" - attributes = zip( - PARSED_COMPONENTS, - (scheme, userinfo, host, port, path, query, fragment), - ) - attrs_dict = {} - for name, value in attributes: - if value is misc.UseExisting: - value = getattr(self, name) - if not isinstance(value, bytes) and hasattr(value, "encode"): - value = value.encode(self.encoding) - attrs_dict[name] = value - authority = self._generate_authority(attrs_dict) - to_str = compat.to_str - ref = self.reference.copy_with( - scheme=to_str(attrs_dict["scheme"], self.encoding), - authority=to_str(authority, self.encoding), - path=to_str(attrs_dict["path"], self.encoding), - query=to_str(attrs_dict["query"], self.encoding), - fragment=to_str(attrs_dict["fragment"], self.encoding), - ) - if not lazy_normalize: - ref = ref.normalize() - return ParseResultBytes( - uri_ref=ref, - encoding=self.encoding, - lazy_normalize=lazy_normalize, - **attrs_dict - ) - - def unsplit(self, use_idna=False): - """Create a URI bytes object from the components. - - :returns: The parsed URI reconstituted as a string. 
- :rtype: bytes - """ - parse_result = self - if use_idna and self.host: - # self.host is bytes, to encode to idna, we need to decode it - # first - host = self.host.decode(self.encoding) - hostbytes = host.encode("idna") - parse_result = self.copy_with(host=hostbytes) - if self.lazy_normalize: - parse_result = parse_result.copy_with(lazy_normalize=False) - uri = parse_result.reference.unsplit() - return uri.encode(self.encoding) - - -def split_authority(authority): - # Initialize our expected return values - userinfo = host = port = None - # Initialize an extra var we may need to use - extra_host = None - # Set-up rest in case there is no userinfo portion - rest = authority - - if "@" in authority: - userinfo, rest = authority.rsplit("@", 1) - - # Handle IPv6 host addresses - if rest.startswith("["): - host, rest = rest.split("]", 1) - host += "]" - - if ":" in rest: - extra_host, port = rest.split(":", 1) - elif not host and rest: - host = rest - - if extra_host and not host: - host = extra_host - - return userinfo, host, port - - -def authority_from(reference, strict): - try: - subauthority = reference.authority_info() - except exceptions.InvalidAuthority: - if strict: - raise - userinfo, host, port = split_authority(reference.authority) - else: - # Thanks to Richard Barrell for this idea: - # https://twitter.com/0x2ba22e11/status/617338811975139328 - userinfo, host, port = ( - subauthority.get(p) for p in ("userinfo", "host", "port") - ) - - if port: - try: - port = int(port) - except ValueError: - raise exceptions.InvalidPort(port) - return userinfo, host, port diff --git a/packages/rfc3986/uri.py b/packages/rfc3986/uri.py deleted file mode 100644 index 75c617d2a..000000000 --- a/packages/rfc3986/uri.py +++ /dev/null @@ -1,161 +0,0 @@ -"""Module containing the implementation of the URIReference class.""" -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Copyright (c) 2015 Ian Stapleton Cordasco -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from collections import namedtuple - -from . import compat -from . import misc -from . import normalizers -from ._mixin import URIMixin - - -class URIReference(namedtuple("URIReference", misc.URI_COMPONENTS), URIMixin): - """Immutable object representing a parsed URI Reference. - - .. note:: - - This class is not intended to be directly instantiated by the user. - - This object exposes attributes for the following components of a - URI: - - - scheme - - authority - - path - - query - - fragment - - .. attribute:: scheme - - The scheme that was parsed for the URI Reference. For example, - ``http``, ``https``, ``smtp``, ``imap``, etc. - - .. attribute:: authority - - Component of the URI that contains the user information, host, - and port sub-components. For example, - ``google.com``, ``127.0.0.1:5000``, ``username@[::1]``, - ``username:password@example.com:443``, etc. - - .. attribute:: path - - The path that was parsed for the given URI Reference. For example, - ``/``, ``/index.php``, etc. - - .. 
attribute:: query - - The query component for a given URI Reference. For example, ``a=b``, - ``a=b%20c``, ``a=b+c``, ``a=b,c=d,e=%20f``, etc. - - .. attribute:: fragment - - The fragment component of a URI. For example, ``section-3.1``. - - This class also provides extra attributes for easier access to information - like the subcomponents of the authority component. - - .. attribute:: userinfo - - The user information parsed from the authority. - - .. attribute:: host - - The hostname, IPv4, or IPv6 address parsed from the authority. - - .. attribute:: port - - The port parsed from the authority. - """ - - slots = () - - def __new__( - cls, scheme, authority, path, query, fragment, encoding="utf-8" - ): - """Create a new URIReference.""" - ref = super(URIReference, cls).__new__( - cls, - scheme or None, - authority or None, - path or None, - query, - fragment, - ) - ref.encoding = encoding - return ref - - __hash__ = tuple.__hash__ - - def __eq__(self, other): - """Compare this reference to another.""" - other_ref = other - if isinstance(other, tuple): - other_ref = URIReference(*other) - elif not isinstance(other, URIReference): - try: - other_ref = URIReference.from_string(other) - except TypeError: - raise TypeError( - "Unable to compare URIReference() to {0}()".format( - type(other).__name__ - ) - ) - - # See http://tools.ietf.org/html/rfc3986#section-6.2 - naive_equality = tuple(self) == tuple(other_ref) - return naive_equality or self.normalized_equality(other_ref) - - def normalize(self): - """Normalize this reference as described in Section 6.2.2. - - This is not an in-place normalization. Instead this creates a new - URIReference. - - :returns: A new reference object with normalized components. - :rtype: URIReference - """ - # See http://tools.ietf.org/html/rfc3986#section-6.2.2 for logic in - # this method. - return URIReference( - normalizers.normalize_scheme(self.scheme or ""), - normalizers.normalize_authority( - (self.userinfo, self.host, self.port) - ), - normalizers.normalize_path(self.path or ""), - normalizers.normalize_query(self.query), - normalizers.normalize_fragment(self.fragment), - self.encoding, - ) - - @classmethod - def from_string(cls, uri_string, encoding="utf-8"): - """Parse a URI reference from the given unicode URI string. - - :param str uri_string: Unicode URI to be parsed into a reference. - :param str encoding: The encoding of the string provided - :returns: :class:`URIReference` or subclass thereof - """ - uri_string = compat.to_str(uri_string, encoding) - - split_uri = misc.URI_MATCHER.match(uri_string).groupdict() - return cls( - split_uri["scheme"], - split_uri["authority"], - normalizers.encode_component(split_uri["path"], encoding), - normalizers.encode_component(split_uri["query"], encoding), - normalizers.encode_component(split_uri["fragment"], encoding), - encoding, - ) diff --git a/packages/rfc3986/validators.py b/packages/rfc3986/validators.py deleted file mode 100644 index f37524882..000000000 --- a/packages/rfc3986/validators.py +++ /dev/null @@ -1,447 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2017 Ian Stapleton Cordasco -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Module containing the validation logic for rfc3986."""
-from . import exceptions
-from . import misc
-from . import normalizers
-
-
-class Validator(object):
-    """Object used to configure validation of all objects in rfc3986.
-
-    .. versionadded:: 1.0
-
-    Example usage::
-
-        >>> from rfc3986 import api, validators
-        >>> uri = api.uri_reference('https://github.com/')
-        >>> validator = validators.Validator().require_presence_of(
-        ...    'scheme', 'host', 'path',
-        ... ).allow_schemes(
-        ...    'http', 'https',
-        ... ).allow_hosts(
-        ...    '127.0.0.1', 'github.com',
-        ... )
-        >>> validator.validate(uri)
-        >>> invalid_uri = api.uri_reference('imap://mail.google.com')
-        >>> validator.validate(invalid_uri)
-        Traceback (most recent call last):
-        ...
-        rfc3986.exceptions.MissingComponentError: ('path was required but
-        missing', URIReference(scheme=u'imap', authority=u'mail.google.com',
-        path=None, query=None, fragment=None), ['path'])
-
-    """
-
-    COMPONENT_NAMES = frozenset(
-        ["scheme", "userinfo", "host", "port", "path", "query", "fragment"]
-    )
-
-    def __init__(self):
-        """Initialize our default validations."""
-        self.allowed_schemes = set()
-        self.allowed_hosts = set()
-        self.allowed_ports = set()
-        self.allow_password = True
-        self.required_components = {
-            "scheme": False,
-            "userinfo": False,
-            "host": False,
-            "port": False,
-            "path": False,
-            "query": False,
-            "fragment": False,
-        }
-        self.validated_components = self.required_components.copy()
-
-    def allow_schemes(self, *schemes):
-        """Require the scheme to be one of the provided schemes.
-
-        .. versionadded:: 1.0
-
-        :param schemes:
-            Schemes, without ``://`` that are allowed.
-        :returns:
-            The validator instance.
-        :rtype:
-            Validator
-        """
-        for scheme in schemes:
-            self.allowed_schemes.add(normalizers.normalize_scheme(scheme))
-        return self
-
-    def allow_hosts(self, *hosts):
-        """Require the host to be one of the provided hosts.
-
-        .. versionadded:: 1.0
-
-        :param hosts:
-            Hosts that are allowed.
-        :returns:
-            The validator instance.
-        :rtype:
-            Validator
-        """
-        for host in hosts:
-            self.allowed_hosts.add(normalizers.normalize_host(host))
-        return self
-
-    def allow_ports(self, *ports):
-        """Require the port to be one of the provided ports.
-
-        .. versionadded:: 1.0
-
-        :param ports:
-            Ports that are allowed.
-        :returns:
-            The validator instance.
-        :rtype:
-            Validator
-        """
-        for port in ports:
-            port_int = int(port, base=10)
-            if 0 <= port_int <= 65535:
-                self.allowed_ports.add(port)
-        return self
-
-    def allow_use_of_password(self):
-        """Allow passwords to be present in the URI.
-
-        .. versionadded:: 1.0
-
-        :returns:
-            The validator instance.
-        :rtype:
-            Validator
-        """
-        self.allow_password = True
-        return self
-
-    def forbid_use_of_password(self):
-        """Prevent passwords from being included in the URI.
-
-        .. versionadded:: 1.0
-
-        :returns:
-            The validator instance.
-        :rtype:
-            Validator
-        """
-        self.allow_password = False
-        return self
-
-    def check_validity_of(self, *components):
-        """Check the validity of the components provided.
-
-        This can be specified repeatedly.
-
-        ..
versionadded:: 1.1 - - :param components: - Names of components from :attr:`Validator.COMPONENT_NAMES`. - :returns: - The validator instance. - :rtype: - Validator - """ - components = [c.lower() for c in components] - for component in components: - if component not in self.COMPONENT_NAMES: - raise ValueError( - '"{}" is not a valid component'.format(component) - ) - self.validated_components.update( - {component: True for component in components} - ) - return self - - def require_presence_of(self, *components): - """Require the components provided. - - This can be specified repeatedly. - - .. versionadded:: 1.0 - - :param components: - Names of components from :attr:`Validator.COMPONENT_NAMES`. - :returns: - The validator instance. - :rtype: - Validator - """ - components = [c.lower() for c in components] - for component in components: - if component not in self.COMPONENT_NAMES: - raise ValueError( - '"{}" is not a valid component'.format(component) - ) - self.required_components.update( - {component: True for component in components} - ) - return self - - def validate(self, uri): - """Check a URI for conditions specified on this validator. - - .. versionadded:: 1.0 - - :param uri: - Parsed URI to validate. - :type uri: - rfc3986.uri.URIReference - :raises MissingComponentError: - When a required component is missing. - :raises UnpermittedComponentError: - When a component is not one of those allowed. - :raises PasswordForbidden: - When a password is present in the userinfo component but is - not permitted by configuration. - :raises InvalidComponentsError: - When a component was found to be invalid. - """ - if not self.allow_password: - check_password(uri) - - required_components = [ - component - for component, required in self.required_components.items() - if required - ] - validated_components = [ - component - for component, required in self.validated_components.items() - if required - ] - if required_components: - ensure_required_components_exist(uri, required_components) - if validated_components: - ensure_components_are_valid(uri, validated_components) - - ensure_one_of(self.allowed_schemes, uri, "scheme") - ensure_one_of(self.allowed_hosts, uri, "host") - ensure_one_of(self.allowed_ports, uri, "port") - - -def check_password(uri): - """Assert that there is no password present in the uri.""" - userinfo = uri.userinfo - if not userinfo: - return - credentials = userinfo.split(":", 1) - if len(credentials) <= 1: - return - raise exceptions.PasswordForbidden(uri) - - -def ensure_one_of(allowed_values, uri, attribute): - """Assert that the uri's attribute is one of the allowed values.""" - value = getattr(uri, attribute) - if value is not None and allowed_values and value not in allowed_values: - raise exceptions.UnpermittedComponentError( - attribute, - value, - allowed_values, - ) - - -def ensure_required_components_exist(uri, required_components): - """Assert that all required components are present in the URI.""" - missing_components = sorted( - [ - component - for component in required_components - if getattr(uri, component) is None - ] - ) - if missing_components: - raise exceptions.MissingComponentError(uri, *missing_components) - - -def is_valid(value, matcher, require): - """Determine if a value is valid based on the provided matcher. - - :param str value: - Value to validate. - :param matcher: - Compiled regular expression to use to validate the value. - :param require: - Whether or not the value is required. 
- """ - if require: - return value is not None and matcher.match(value) - - # require is False and value is not None - return value is None or matcher.match(value) - - -def authority_is_valid(authority, host=None, require=False): - """Determine if the authority string is valid. - - :param str authority: - The authority to validate. - :param str host: - (optional) The host portion of the authority to validate. - :param bool require: - (optional) Specify if authority must not be None. - :returns: - ``True`` if valid, ``False`` otherwise - :rtype: - bool - """ - validated = is_valid(authority, misc.SUBAUTHORITY_MATCHER, require) - if validated and host is not None: - return host_is_valid(host, require) - return validated - - -def host_is_valid(host, require=False): - """Determine if the host string is valid. - - :param str host: - The host to validate. - :param bool require: - (optional) Specify if host must not be None. - :returns: - ``True`` if valid, ``False`` otherwise - :rtype: - bool - """ - validated = is_valid(host, misc.HOST_MATCHER, require) - if validated and host is not None and misc.IPv4_MATCHER.match(host): - return valid_ipv4_host_address(host) - elif validated and host is not None and misc.IPv6_MATCHER.match(host): - return misc.IPv6_NO_RFC4007_MATCHER.match(host) is not None - return validated - - -def scheme_is_valid(scheme, require=False): - """Determine if the scheme is valid. - - :param str scheme: - The scheme string to validate. - :param bool require: - (optional) Set to ``True`` to require the presence of a scheme. - :returns: - ``True`` if the scheme is valid. ``False`` otherwise. - :rtype: - bool - """ - return is_valid(scheme, misc.SCHEME_MATCHER, require) - - -def path_is_valid(path, require=False): - """Determine if the path component is valid. - - :param str path: - The path string to validate. - :param bool require: - (optional) Set to ``True`` to require the presence of a path. - :returns: - ``True`` if the path is valid. ``False`` otherwise. - :rtype: - bool - """ - return is_valid(path, misc.PATH_MATCHER, require) - - -def query_is_valid(query, require=False): - """Determine if the query component is valid. - - :param str query: - The query string to validate. - :param bool require: - (optional) Set to ``True`` to require the presence of a query. - :returns: - ``True`` if the query is valid. ``False`` otherwise. - :rtype: - bool - """ - return is_valid(query, misc.QUERY_MATCHER, require) - - -def fragment_is_valid(fragment, require=False): - """Determine if the fragment component is valid. - - :param str fragment: - The fragment string to validate. - :param bool require: - (optional) Set to ``True`` to require the presence of a fragment. - :returns: - ``True`` if the fragment is valid. ``False`` otherwise. - :rtype: - bool - """ - return is_valid(fragment, misc.FRAGMENT_MATCHER, require) - - -def valid_ipv4_host_address(host): - """Determine if the given host is a valid IPv4 address.""" - # If the host exists, and it might be IPv4, check each byte in the - # address. 
-    return all([0 <= int(byte, base=10) <= 255 for byte in host.split(".")])
-
-
-_COMPONENT_VALIDATORS = {
-    "scheme": scheme_is_valid,
-    "path": path_is_valid,
-    "query": query_is_valid,
-    "fragment": fragment_is_valid,
-}
-
-_SUBAUTHORITY_VALIDATORS = set(["userinfo", "host", "port"])
-
-
-def subauthority_component_is_valid(uri, component):
-    """Determine if the userinfo, host, and port are valid."""
-    try:
-        subauthority_dict = uri.authority_info()
-    except exceptions.InvalidAuthority:
-        return False
-
-    # If we can parse the authority into sub-components and we're not
-    # validating the port, we can assume it's valid.
-    if component == "host":
-        return host_is_valid(subauthority_dict["host"])
-    elif component != "port":
-        return True
-
-    try:
-        port = int(subauthority_dict["port"])
-    except TypeError:
-        # If the port wasn't provided it'll be None and int(None) raises a
-        # TypeError
-        return True
-
-    return 0 <= port <= 65535
-
-
-def ensure_components_are_valid(uri, validated_components):
-    """Assert that all components are valid in the URI."""
-    invalid_components = set([])
-    for component in validated_components:
-        if component in _SUBAUTHORITY_VALIDATORS:
-            if not subauthority_component_is_valid(uri, component):
-                invalid_components.add(component)
-            # Python's peephole optimizer means that while this continue *is*
-            # actually executed, coverage.py cannot detect that. See also,
-            # https://bitbucket.org/ned/coveragepy/issues/198/continue-marked-as-not-covered
-            continue  # nocov: Python 2.7, 3.3, 3.4
-
-        validator = _COMPONENT_VALIDATORS[component]
-        if not validator(getattr(uri, component)):
-            invalid_components.add(component)
-
-    if invalid_components:
-        raise exceptions.InvalidComponentsError(uri, *invalid_components)
diff --git a/packages/sniffio/__init__.py b/packages/sniffio/__init__.py
index ddbd53a53..fb3364d7f 100644
--- a/packages/sniffio/__init__.py
+++ b/packages/sniffio/__init__.py
@@ -11,4 +11,5 @@
     current_async_library,
     AsyncLibraryNotFoundError,
     current_async_library_cvar,
+    thread_local,
 )
diff --git a/packages/sniffio/_impl.py b/packages/sniffio/_impl.py
index 240d3a2ed..c1a7bbf21 100644
--- a/packages/sniffio/_impl.py
+++ b/packages/sniffio/_impl.py
@@ -1,12 +1,23 @@
 from contextvars import ContextVar
 from typing import Optional
 import sys
+import threading
 
 current_async_library_cvar = ContextVar(
     "current_async_library_cvar", default=None
 )  # type: ContextVar[Optional[str]]
 
 
+class _ThreadLocal(threading.local):
+    # Since threading.local provides no explicit mechanism for setting
+    # a default value, a custom class with a class attribute is used
+    # instead.
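+    # A class attribute acts as the per-thread default: lookups fall back
+    # to it until a given thread assigns the instance attribute, and a
+    # value set in one thread is never visible to any other thread.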
+    name = None  # type: Optional[str]
+
+
+thread_local = _ThreadLocal()
+
+
 class AsyncLibraryNotFoundError(RuntimeError):
     pass
@@ -52,15 +63,13 @@ async def generic_sleep(seconds):
                raise RuntimeError(f"Unsupported library {library!r}")
     """
-    value = current_async_library_cvar.get()
+    value = thread_local.name
     if value is not None:
         return value
 
-    # Sniff for curio (for now)
-    if 'curio' in sys.modules:
-        from curio.meta import curio_running
-        if curio_running():
-            return 'curio'
+    value = current_async_library_cvar.get()
+    if value is not None:
+        return value
 
     # Need to sniff for asyncio
     if "asyncio" in sys.modules:
@@ -71,13 +80,16 @@
             current_task = asyncio.Task.current_task  # type: ignore[attr-defined]
         try:
             if current_task() is not None:
-                if (3, 7) <= sys.version_info:
-                    # asyncio has contextvars support, and we're in a task, so
-                    # we can safely cache the sniffed value
-                    current_async_library_cvar.set("asyncio")
                 return "asyncio"
         except RuntimeError:
             pass
+
+    # Sniff for curio (for now)
+    if 'curio' in sys.modules:
+        from curio.meta import curio_running
+        if curio_running():
+            return 'curio'
+
     raise AsyncLibraryNotFoundError(
         "unknown async library, or not in async context"
     )
diff --git a/packages/sniffio/_tests/test_sniffio.py b/packages/sniffio/_tests/test_sniffio.py
index a19d13c9b..984c8c00d 100644
--- a/packages/sniffio/_tests/test_sniffio.py
+++ b/packages/sniffio/_tests/test_sniffio.py
@@ -1,14 +1,15 @@
+import os
 import sys
 
 import pytest
 
 from .. import (
     current_async_library,
     AsyncLibraryNotFoundError,
-    current_async_library_cvar
+    current_async_library_cvar, thread_local
 )
 
 
-def test_basics():
+def test_basics_cvar():
     with pytest.raises(AsyncLibraryNotFoundError):
         current_async_library()
@@ -22,6 +23,20 @@ def test_basics():
         current_async_library()
 
 
+def test_basics_tlocal():
+    with pytest.raises(AsyncLibraryNotFoundError):
+        current_async_library()
+
+    old_name, thread_local.name = thread_local.name, "generic-lib"
+    try:
+        assert current_async_library() == "generic-lib"
+    finally:
+        thread_local.name = old_name
+
+    with pytest.raises(AsyncLibraryNotFoundError):
+        current_async_library()
+
+
 def test_asyncio():
     import asyncio
@@ -36,16 +51,18 @@ async def this_is_asyncio():
         assert current_async_library() == "asyncio"
         ran.append(True)
 
-    loop = asyncio.get_event_loop()
-    loop.run_until_complete(this_is_asyncio())
+    asyncio.run(this_is_asyncio())
     assert ran == [True]
-    loop.close()
 
     with pytest.raises(AsyncLibraryNotFoundError):
         current_async_library()
 
 
-@pytest.mark.skipif(sys.version_info < (3, 6), reason='Curio requires 3.6+')
+# https://github.com/dabeaz/curio/pull/354
+@pytest.mark.skipif(
+    os.name == "nt" and sys.version_info >= (3, 9),
+    reason="Curio breaks on Python 3.9+ on Windows. A fix has not been released yet",
+)
 def test_curio():
     import curio
diff --git a/packages/sniffio/_version.py b/packages/sniffio/_version.py
index 7c4b11980..5a5f906bb 100644
--- a/packages/sniffio/_version.py
+++ b/packages/sniffio/_version.py
@@ -1,3 +1,3 @@
 # This file is imported from __init__.py and exec'd from setup.py
 
-__version__ = "1.2.0"
+__version__ = "1.3.0"
diff --git a/resources/lib/services/nfsession/session/base.py b/resources/lib/services/nfsession/session/base.py
index 907c0c81a..d1c0e023c 100644
--- a/resources/lib/services/nfsession/session/base.py
+++ b/resources/lib/services/nfsession/session/base.py
@@ -44,7 +44,7 @@ def _init_session(self):
         import httpx
        # (http1=False, http2=True) means the client knows the server supports HTTP/2 and can skip negotiation,
        # prior knowledge: https://python-hyper.org/projects/hyper-h2/en/v2.3.1/negotiating-http2.html#prior-knowledge
-        self.session = httpx.Client(http1=False, http2=True)
+        self.session = httpx.Client(http1=False, http2=True, follow_redirects=True)
         self.session.max_redirects = 10  # Too many redirects likely means a problem
         self.session.headers.update({
             'User-Agent': common.get_user_agent(enable_android_mediaflag_fix=True),
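
For reference, the rfc3986 validators module deleted above exposed a small chainable API. The sketch below is reconstructed from the docstrings in the removed code, using only the methods visible there (require_presence_of, validate) plus rfc3986's top-level uri_reference helper, which is not shown in this diff; the example URI is arbitrary.

import rfc3986
from rfc3986 import exceptions, validators

# Build a validator that insists on a scheme and a host, then check a
# parsed URIReference against it.
validator = validators.Validator().require_presence_of("scheme", "host")

try:
    validator.validate(rfc3986.uri_reference("//example.com/path"))
except exceptions.MissingComponentError as err:
    # "//example.com/path" has a host but no scheme, so this branch runs.
    print("missing component:", err)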
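
The new sniffio thread_local hook is what an event loop running on its own thread is expected to set; current_async_library() consults it before the context variable. A minimal sketch of the intended use, mirroring the swap-and-restore pattern from test_basics_tlocal above; the library name "mylib" is made up for illustration.

import threading

import sniffio

def worker():
    # Advertise a (hypothetical) event loop for this thread only; other
    # threads are unaffected because the hook is a threading.local.
    old_name, sniffio.thread_local.name = sniffio.thread_local.name, "mylib"
    try:
        assert sniffio.current_async_library() == "mylib"
    finally:
        sniffio.thread_local.name = old_name

t = threading.Thread(target=worker)
t.start()
t.join()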
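
On the base.py change: newer httpx releases (0.20+, if memory of the changelog serves) renamed allow_redirects to follow_redirects and stopped following redirects by default, so the session must now opt in explicitly. A sketch of the resulting client setup under those assumptions; the request URL is illustrative only, and http2=True requires the httpx[http2] extra.

import httpx

# http1=False with http2=True is "prior knowledge" HTTP/2: the client
# assumes the server speaks HTTP/2 and skips the upgrade negotiation.
client = httpx.Client(http1=False, http2=True, follow_redirects=True)
client.max_redirects = 10  # fail loudly instead of chasing redirect loops

response = client.get("https://example.com/")  # illustrative URL
print(response.http_version, response.status_code)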