From 6e730fe4621451991a558f8d0215b1e8e9234d1d Mon Sep 17 00:00:00 2001 From: Stefan Eissing Date: Mon, 9 Jan 2023 16:58:10 +0100 Subject: [PATCH] Basic infra to run curl against an apache httpd plus nghttpx for h3. - adding '--with-test-httpd=' to configure non-standard apache2 install - python env and base classes for running httpd - basic tests for connectivity with h1/h2/h3 - adding test cases for truncated responses in http versions. - adding goaway test for HTTP/3. - adding "stuttering" tests with parallel downloads in chunks with varying delays between chunks. - adding a curltest module to the httpd server, adding GOAWAY test. - mod_curltest now installs 2 handlers - 'echo': writing as response body what came as request body - 'tweak': with query parameters to tweak response behaviour - marked known fails as skip for now --- .github/scripts/spellcheck.words | 7 + .lift/config.toml | 2 +- configure.ac | 56 ++ lib/http2.c | 59 +- lib/vquic/curl_ngtcp2.c | 81 ++- tests/tests-httpd/.gitignore | 6 + tests/tests-httpd/README.md | 135 +++++ tests/tests-httpd/config.ini.in | 39 ++ tests/tests-httpd/conftest.py | 81 +++ tests/tests-httpd/test_01_basic.py | 89 +++ tests/tests-httpd/test_02_download.py | 173 ++++++ tests/tests-httpd/test_03_goaway.py | 113 ++++ tests/tests-httpd/test_04_stuttered.py | 143 +++++ tests/tests-httpd/test_05_errors.py | 87 +++ tests/tests-httpd/testenv/__init__.py | 31 + tests/tests-httpd/testenv/certs.py | 528 ++++++++++++++++++ tests/tests-httpd/testenv/curl.py | 360 ++++++++++++ tests/tests-httpd/testenv/env.py | 243 ++++++++ tests/tests-httpd/testenv/httpd.py | 288 ++++++++++ .../testenv/mod_curltest/.gitignore | 5 + .../testenv/mod_curltest/mod_curltest.c | 411 ++++++++++++++ tests/tests-httpd/testenv/nghttpx.py | 125 +++++ 22 files changed, 3016 insertions(+), 46 deletions(-) create mode 100644 tests/tests-httpd/.gitignore create mode 100644 tests/tests-httpd/README.md create mode 100644 tests/tests-httpd/config.ini.in create mode 
100644 tests/tests-httpd/conftest.py create mode 100644 tests/tests-httpd/test_01_basic.py create mode 100644 tests/tests-httpd/test_02_download.py create mode 100644 tests/tests-httpd/test_03_goaway.py create mode 100644 tests/tests-httpd/test_04_stuttered.py create mode 100644 tests/tests-httpd/test_05_errors.py create mode 100644 tests/tests-httpd/testenv/__init__.py create mode 100644 tests/tests-httpd/testenv/certs.py create mode 100644 tests/tests-httpd/testenv/curl.py create mode 100644 tests/tests-httpd/testenv/env.py create mode 100644 tests/tests-httpd/testenv/httpd.py create mode 100644 tests/tests-httpd/testenv/mod_curltest/.gitignore create mode 100644 tests/tests-httpd/testenv/mod_curltest/mod_curltest.c create mode 100644 tests/tests-httpd/testenv/nghttpx.py diff --git a/.github/scripts/spellcheck.words b/.github/scripts/spellcheck.words index df18e0413f326c..6486dacb94665d 100644 --- a/.github/scripts/spellcheck.words +++ b/.github/scripts/spellcheck.words @@ -20,6 +20,8 @@ AmigaOS AmiSSL anyauth anycast +apache +Apache API APIs APOP @@ -135,6 +137,7 @@ CURLE CURLH curlimages curlrc +curltest customizable CVE CVSS @@ -295,6 +298,8 @@ hsts HTC html http +httpd +HTTPD HTTPAUTH httpget HttpGet @@ -577,6 +582,8 @@ ptr punycode py pycurl +pytest +Pytest QNX QoS Qubes diff --git a/.lift/config.toml b/.lift/config.toml index f26e2e46c34816..e9dc0332ed7c77 100644 --- a/.lift/config.toml +++ b/.lift/config.toml @@ -2,6 +2,6 @@ # # SPDX-License-Identifier: curl -ignoreRules = [ "DEAD_STORE" ] +ignoreRules = [ "DEAD_STORE", "subprocess_without_shell_equals_true" ] build = "make" setup = ".lift/setup.sh" diff --git a/configure.ac b/configure.ac index 6b8a8187fa3f71..1a4e21c260f192 100644 --- a/configure.ac +++ b/configure.ac @@ -311,6 +311,61 @@ AS_HELP_STRING([--with-test-nghttpx=PATH],[where to find nghttpx for testing]), ) AC_SUBST(TEST_NGHTTPX) +dnl we'd like a httpd+apachectl as test server +dnl +AC_ARG_WITH(test-httpd, 
[AS_HELP_STRING([--with-test-httpd=PATH], + [where to find httpd/apache2 for testing])], + [request_httpd=$withval], [request_httpd=check]) +if test x"$request_httpd" = "xcheck"; then + if test -x "/usr/sbin/apache2" -a -x "/usr/sbin/apache2ctl"; then + # common location on distros (debian/ubuntu) + HTTPD="/usr/sbin/apache2" + APACHECTL="/usr/sbin/apache2ctl" + AC_PATH_PROG([APXS], [apxs]) + if test "x$APXS" != "x"; then + AC_MSG_NOTICE([apache2-dev not installed, httpd tests disabled]) + fi + else + AC_PATH_PROG([HTTPD], [httpd]) + if test "x$HTTPD" = "x"; then + AC_PATH_PROG([HTTPD], [apache2]) + fi + AC_PATH_PROG([APACHECTL], [apachectl]) + AC_PATH_PROG([APXS], [apxs]) + if test "x$HTTPD" = "x" -o "x$APACHECTL" = "x"; then + AC_MSG_NOTICE([httpd/apache2 not in PATH, httpd tests disabled]) + fi + if test "x$APXS" = "x"; then + AC_MSG_NOTICE([apxs not in PATH, httpd tests disabled]) + fi + fi +else + HTTPD="${request_httpd}/bin/httpd" + APACHECTL="${request_httpd}/bin/apachectl" + APXS="${request_httpd}/bin/apxs" + if test ! -x "${HTTPD}"; then + AC_MSG_NOTICE([httpd not found as ${HTTPD}, httpd tests disabled]) + elif test ! -x "${APACHECTL}"; then + AC_MSG_NOTICE([apachectl not found as ${APACHECTL}, httpd tests disabled]) + elif test ! -x "${APXS}"; then + AC_MSG_NOTICE([apxs not found as ${APXS}, httpd tests disabled]) + else + AC_MSG_NOTICE([using HTTPD=$HTTPD for tests]) + fi +fi +AC_SUBST(HTTPD) +AC_SUBST(APACHECTL) +AC_SUBST(APXS) + +dnl the nghttpx we might use in httpd testing +if test "x$TEST_NGHTTPX" != "x"; then + HTTPD_NGHTTPX="$TEST_NGHTTPX" +else + AC_PATH_PROG([HTTPD_NGHTTPX], [nghttpx]) +fi + AC_PATH_PROG([APXS], [apxs]) +AC_SUBST(HTTPD_NGHTTPX) + dnl If no TLS choice has been made, check if it was explicitly disabled or dnl error out to force the user to decide. 
if test -z "$TLSCHOICE"; then @@ -4590,6 +4645,7 @@ AC_CONFIG_FILES([Makefile \ tests/server/Makefile \ tests/libtest/Makefile \ tests/unit/Makefile \ + tests/tests-httpd/config.ini \ packages/Makefile \ packages/vms/Makefile \ curl-config \ diff --git a/lib/http2.c b/lib/http2.c index f9e90862400c55..88064aa1a05bc2 100644 --- a/lib/http2.c +++ b/lib/http2.c @@ -772,48 +772,60 @@ static int on_frame_recv(nghttp2_session *session, const nghttp2_frame *frame, if(!stream_id) { /* stream ID zero is for connection-oriented stuff */ - if(frame->hd.type == NGHTTP2_SETTINGS) { + DEBUGASSERT(data); + switch(frame->hd.type) { + case NGHTTP2_SETTINGS: { uint32_t max_conn = ctx->max_concurrent_streams; - H2BUGF(infof(data, "Got SETTINGS")); + H2BUGF(infof(data, CFMSG(cf, "recv frame SETTINGS"))); ctx->max_concurrent_streams = nghttp2_session_get_remote_settings( session, NGHTTP2_SETTINGS_MAX_CONCURRENT_STREAMS); ctx->enable_push = nghttp2_session_get_remote_settings( session, NGHTTP2_SETTINGS_ENABLE_PUSH); - H2BUGF(infof(data, "MAX_CONCURRENT_STREAMS == %d", + H2BUGF(infof(data, CFMSG(cf, "MAX_CONCURRENT_STREAMS == %d"), ctx->max_concurrent_streams)); - H2BUGF(infof(data, "ENABLE_PUSH == %s", + H2BUGF(infof(data, CFMSG(cf, "ENABLE_PUSH == %s"), ctx->enable_push?"TRUE":"false")); - if(max_conn != ctx->max_concurrent_streams) { + if(data && max_conn != ctx->max_concurrent_streams) { /* only signal change if the value actually changed */ - infof(data, - "Connection state changed (MAX_CONCURRENT_STREAMS == %u)!", + infof(data, CFMSG(cf, "MAX_CONCURRENT_STREAMS now %u"), ctx->max_concurrent_streams); multi_connchanged(data->multi); } + break; + } + case NGHTTP2_GOAWAY: + if(data) { + infof(data, "recveived GOAWAY, error=%d, last_stream=%u", + frame->goaway.error_code, frame->goaway.last_stream_id); + multi_connchanged(data->multi); + } + break; + case NGHTTP2_WINDOW_UPDATE: + H2BUGF(infof(data, CFMSG(cf, "recv frame WINDOW_UPDATE"))); + break; + default: + H2BUGF(infof(data, 
CFMSG(cf, "recv frame %x on 0"), frame->hd.type)); } return 0; } data_s = nghttp2_session_get_stream_user_data(session, stream_id); if(!data_s) { - H2BUGF(infof(data, - "No Curl_easy associated with stream: %u", + H2BUGF(infof(data, CFMSG(cf, "No Curl_easy associated with stream: %u"), stream_id)); return 0; } stream = data_s->req.p.http; if(!stream) { - H2BUGF(infof(data_s, "No proto pointer for stream: %u", + H2BUGF(infof(data_s, CFMSG(cf, "No proto pointer for stream: %u"), stream_id)); return NGHTTP2_ERR_CALLBACK_FAILURE; } - H2BUGF(infof(data_s, "on_frame_recv() header %x stream %u", - frame->hd.type, stream_id)); - switch(frame->hd.type) { case NGHTTP2_DATA: /* If body started on this stream, then receiving DATA is illegal. */ + H2BUGF(infof(data_s, CFMSG(cf, "recv frame DATA stream %u"), stream_id)); if(!stream->bodystarted) { rv = nghttp2_submit_rst_stream(session, NGHTTP2_FLAG_NONE, stream_id, NGHTTP2_PROTOCOL_ERROR); @@ -824,6 +836,8 @@ static int on_frame_recv(nghttp2_session *session, const nghttp2_frame *frame, } break; case NGHTTP2_HEADERS: + H2BUGF(infof(data_s, CFMSG(cf, "recv frame HEADERS stream %u"), + stream_id)); if(stream->bodystarted) { /* Only valid HEADERS after body started is trailer HEADERS. We buffer them in on_header callback. */ @@ -857,7 +871,7 @@ static int on_frame_recv(nghttp2_session *session, const nghttp2_frame *frame, stream->nread_header_recvbuf += ncopy; DEBUGASSERT(stream->mem); - H2BUGF(infof(data_s, "Store %zu bytes headers from stream %u at %p", + H2BUGF(infof(data_s, CFMSG(cf, "%zu header bytes, stream %u at %p"), ncopy, stream_id, stream->mem)); stream->len -= ncopy; @@ -869,6 +883,8 @@ static int on_frame_recv(nghttp2_session *session, const nghttp2_frame *frame, Curl_expire(data_s, 0, EXPIRE_RUN_NOW); break; case NGHTTP2_PUSH_PROMISE: + H2BUGF(infof(data_s, CFMSG(cf, "recv frame PUSH_PROMISE stream %u"), + stream_id)); rv = push_promise(cf, data_s, &frame->push_promise); if(rv) { /* deny! 
*/ int h2; @@ -879,13 +895,13 @@ static int on_frame_recv(nghttp2_session *session, const nghttp2_frame *frame, if(nghttp2_is_fatal(h2)) return NGHTTP2_ERR_CALLBACK_FAILURE; else if(rv == CURL_PUSH_ERROROUT) { - DEBUGF(infof(data_s, "Fail the parent stream (too)")); + DEBUGF(infof(data_s, CFMSG(cf, "Fail the parent stream (too)"))); return NGHTTP2_ERR_CALLBACK_FAILURE; } } break; default: - H2BUGF(infof(data_s, "Got frame type %x for stream %u", + H2BUGF(infof(data_s, CFMSG(cf, "recv frame %x for stream %u"), frame->hd.type, stream_id)); break; } @@ -2210,12 +2226,19 @@ static CURLcode h2_cf_query(struct Curl_cfilter *cf, int query, int *pres1, void **pres2) { struct h2_cf_ctx *ctx = cf->ctx; + size_t effective_max; switch(query) { case CF_QUERY_MAX_CONCURRENT: DEBUGASSERT(pres1); - *pres1 = (ctx->max_concurrent_streams > INT_MAX)? - INT_MAX : (int)ctx->max_concurrent_streams; + if(nghttp2_session_check_request_allowed(ctx->h2) == 0) { + /* the limit is what we have in use right now */ + effective_max = CONN_INUSE(cf->conn); + } + else { + effective_max = ctx->max_concurrent_streams; + } + *pres1 = (effective_max > INT_MAX)? 
INT_MAX : (int)effective_max; return CURLE_OK; default: break; diff --git a/lib/vquic/curl_ngtcp2.c b/lib/vquic/curl_ngtcp2.c index 5ee5ad86bee473..291255e7945be2 100644 --- a/lib/vquic/curl_ngtcp2.c +++ b/lib/vquic/curl_ngtcp2.c @@ -67,15 +67,24 @@ #include "memdebug.h" /* #define DEBUG_NGTCP2 */ -#ifdef CURLDEBUG -#define DEBUG_HTTP3 -#endif -#ifdef DEBUG_HTTP3 -#define H3BUGF(x) x +#define DEBUG_CF 0 + +#if DEBUG_CF +#define CF_DEBUGF(x) x +#define H3STRM(cf,sid,msg) \ + "[CONN-%ld-%d][CF-%s][h3sid=%" PRIx64 "] "msg, (cf)->conn->connection_id, \ + (cf)->sockindex, (cf)->cft->name, sid +#define QSTRM(cf,sid,msg) \ + "[CONN-%ld-%d][CF-%s][qsid=%" PRIx64 "] "msg, (cf)->conn->connection_id, \ + (cf)->sockindex, (cf)->cft->name, sid + #else -#define H3BUGF(x) do { } while(0) +#define H3STRM(cf,sid,msg) +#define QSTRM(cf,sid,msg) +#define CF_DEBUGF(x) do { } while(0) #endif + #define H3_ALPN_H3_29 "\x5h3-29" #define H3_ALPN_H3 "\x2h3" @@ -198,8 +207,11 @@ static ngtcp2_tstamp timestamp(void) #ifdef DEBUG_NGTCP2 static void quic_printf(void *user_data, const char *fmt, ...) 
{ + struct Curl_cfilter *cf = user_data; + struct cf_ngtcp2_ctx *ctx = cf->ctx; + + (void)ctx; /* TODO: need an easy handle to infof() message */ va_list ap; - (void)user_data; /* TODO, use this to do infof() instead long-term */ va_start(ap, fmt); vfprintf(stderr, fmt, ap); va_end(ap); @@ -430,15 +442,15 @@ static CURLcode quic_init_ssl(struct Curl_cfilter *cf, gnutls_session_set_ptr(ctx->gtls->session, &ctx->conn_ref); if(ngtcp2_crypto_gnutls_configure_client_session(ctx->gtls->session) != 0) { - H3BUGF(fprintf(stderr, - "ngtcp2_crypto_gnutls_configure_client_session failed\n")); + CF_DEBUGF(fprintf(stderr, + "ngtcp2_crypto_gnutls_configure_client_session failed\n")); return CURLE_QUIC_CONNECT_ERROR; } rc = gnutls_priority_set_direct(ctx->gtls->session, QUIC_PRIORITY, NULL); if(rc < 0) { - H3BUGF(fprintf(stderr, "gnutls_priority_set_direct failed: %s\n", - gnutls_strerror(rc))); + CF_DEBUGF(fprintf(stderr, "gnutls_priority_set_direct failed: %s\n", + gnutls_strerror(rc))); return CURLE_QUIC_CONNECT_ERROR; } @@ -637,9 +649,11 @@ static int cb_stream_close(ngtcp2_conn *tconn, uint32_t flags, { struct Curl_cfilter *cf = user_data; struct cf_ngtcp2_ctx *ctx = cf->ctx; + struct Curl_easy *data = stream_user_data; int rv; + + (void)data; (void)tconn; - (void)stream_user_data; /* stream is closed... 
*/ if(!(flags & NGTCP2_STREAM_CLOSE_FLAG_APP_ERROR_CODE_SET)) { @@ -648,6 +662,7 @@ static int cb_stream_close(ngtcp2_conn *tconn, uint32_t flags, rv = nghttp3_conn_close_stream(ctx->h3conn, stream_id, app_error_code); + CF_DEBUGF(infof(data, QSTRM(cf, stream_id, "close -> %d"), rv)); if(rv) { ngtcp2_connection_close_error_set_application_error( &ctx->last_error, nghttp3_err_infer_quic_app_error_code(rv), NULL, 0); @@ -663,13 +678,15 @@ static int cb_stream_reset(ngtcp2_conn *tconn, int64_t stream_id, { struct Curl_cfilter *cf = user_data; struct cf_ngtcp2_ctx *ctx = cf->ctx; + struct Curl_easy *data = stream_user_data; int rv; (void)tconn; (void)final_size; (void)app_error_code; - (void)stream_user_data; + (void)data; rv = nghttp3_conn_shutdown_stream_read(ctx->h3conn, stream_id); + CF_DEBUGF(infof(data, QSTRM(cf, stream_id, "reset -> %d"), rv)); if(rv) { return NGTCP2_ERR_CALLBACK_FAILURE; } @@ -850,14 +867,15 @@ static int cb_h3_stream_close(nghttp3_conn *conn, int64_t stream_id, uint64_t app_error_code, void *user_data, void *stream_user_data) { + struct Curl_cfilter *cf = user_data; struct Curl_easy *data = stream_user_data; struct HTTP *stream = data->req.p.http; (void)conn; (void)stream_id; (void)app_error_code; - (void)user_data; - H3BUGF(infof(data, "cb_h3_stream_close CALLED")); + (void)cf; + CF_DEBUGF(infof(data, H3STRM(cf, stream_id, "close"))); stream->closed = TRUE; stream->error3 = app_error_code; Curl_expire(data, 0, EXPIRE_QUIC); @@ -897,12 +915,16 @@ static int cb_h3_recv_data(nghttp3_conn *conn, int64_t stream_id, const uint8_t *buf, size_t buflen, void *user_data, void *stream_user_data) { + struct Curl_cfilter *cf = user_data; struct Curl_easy *data = stream_user_data; struct HTTP *stream = data->req.p.http; CURLcode result = CURLE_OK; (void)conn; + (void)cf; result = write_data(stream, buf, buflen); + CF_DEBUGF(infof(data, H3STRM(cf, stream_id, "recv_data(len=%zu) -> %d"), + buflen, result)); if(result) { return -1; } @@ -957,13 +979,14 @@ 
static int decode_status_code(const uint8_t *value, size_t len) static int cb_h3_end_headers(nghttp3_conn *conn, int64_t stream_id, int fin, void *user_data, void *stream_user_data) { + struct Curl_cfilter *cf = user_data; struct Curl_easy *data = stream_user_data; struct HTTP *stream = data->req.p.http; CURLcode result = CURLE_OK; (void)conn; (void)stream_id; - (void)user_data; (void)fin; + (void)cf; /* add a CRLF only if we've received some headers */ if(stream->firstheader) { @@ -973,6 +996,8 @@ static int cb_h3_end_headers(nghttp3_conn *conn, int64_t stream_id, } } + CF_DEBUGF(infof(data, H3STRM(cf, stream_id, "end_headers(status_code=%d"), + stream->status_code)); if(stream->status_code / 100 != 1) { stream->bodystarted = TRUE; } @@ -1054,12 +1079,14 @@ static int cb_h3_reset_stream(nghttp3_conn *conn, int64_t stream_id, void *stream_user_data) { struct Curl_cfilter *cf = user_data; struct cf_ngtcp2_ctx *ctx = cf->ctx; + struct Curl_easy *data = stream_user_data; int rv; (void)conn; - (void)stream_user_data; + (void)data; rv = ngtcp2_conn_shutdown_stream_write(ctx->qconn, stream_id, app_error_code); + CF_DEBUGF(infof(data, H3STRM(cf, stream_id, "reset -> %d"), rv)); if(rv && rv != NGTCP2_ERR_STREAM_NOT_FOUND) { return NGTCP2_ERR_CALLBACK_FAILURE; } @@ -1251,9 +1278,9 @@ static int cb_h3_acked_stream_data(nghttp3_conn *conn, int64_t stream_id, if(!data->set.postfields) { stream->h3out->used -= datalen; - H3BUGF(infof(data, - "cb_h3_acked_stream_data, %zd bytes, %zd left unacked", - datalen, stream->h3out->used)); + CF_DEBUGF(infof(data, + "cb_h3_acked_stream_data, %zd bytes, %zd left unacked", + datalen, stream->h3out->used)); DEBUGASSERT(stream->h3out->used < H3_SEND_SIZE); if(stream->h3out->used == 0) { @@ -1318,13 +1345,13 @@ static nghttp3_ssize cb_h3_readfunction(nghttp3_conn *conn, int64_t stream_id, if(!stream->upload_left) *pflags = NGHTTP3_DATA_FLAG_EOF; } - H3BUGF(infof(data, "cb_h3_readfunction %zd bytes%s (at %zd unacked)", - nread, *pflags == 
NGHTTP3_DATA_FLAG_EOF?" EOF":"", - out->used)); + CF_DEBUGF(infof(data, "cb_h3_readfunction %zd bytes%s (at %zd unacked)", + nread, *pflags == NGHTTP3_DATA_FLAG_EOF?" EOF":"", + out->used)); } if(stream->upload_done && !stream->upload_len && (stream->upload_left <= 0)) { - H3BUGF(infof(data, "cb_h3_readfunction sets EOF")); + CF_DEBUGF(infof(data, "cb_h3_readfunction sets EOF")); *pflags = NGHTTP3_DATA_FLAG_EOF; return nread ? 1 : 0; } @@ -1427,7 +1454,7 @@ static CURLcode h3_stream_open(struct Curl_cfilter *cf, Curl_safefree(nva); - infof(data, "Using HTTP/3 Stream ID: %x (easy handle %p)", + infof(data, "Using HTTP/3 Stream ID: %" PRIx64 " (easy handle %p)", stream3_id, (void *)data); Curl_pseudo_free(hreq); @@ -1469,8 +1496,8 @@ static ssize_t cf_ngtcp2_send(struct Curl_cfilter *cf, struct Curl_easy *data, sent = len; } else { - H3BUGF(infof(data, "ngh3_stream_send() wants to send %zd bytes", - len)); + CF_DEBUGF(infof(data, "ngh3_stream_send() wants to send %zd bytes", + len)); if(!stream->upload_len) { stream->upload_mem = buf; stream->upload_len = len; diff --git a/tests/tests-httpd/.gitignore b/tests/tests-httpd/.gitignore new file mode 100644 index 00000000000000..2214815cc21c55 --- /dev/null +++ b/tests/tests-httpd/.gitignore @@ -0,0 +1,6 @@ +# Copyright (C) 2000 - 2022 Daniel Stenberg, , et al. +# +# SPDX-License-Identifier: curl + +config.ini +gen \ No newline at end of file diff --git a/tests/tests-httpd/README.md b/tests/tests-httpd/README.md new file mode 100644 index 00000000000000..cf02b50aaf6bfa --- /dev/null +++ b/tests/tests-httpd/README.md @@ -0,0 +1,135 @@ + + +# The curl HTTPD Test Suite + +This is an additional test suite using a combination of Apache httpd and nghttpx servers to perform various tests beyond the capabilities of the standard curl test suite. + +# Usage + +The test cases and necessary files are in `tests/httpd`. You can invoke `pytest` from there or from the top level curl checkout and it will find all tests. 
+ +``` +curl> pytest +platform darwin -- Python 3.9.15, pytest-6.2.0, py-1.10.0, pluggy-0.13.1 +rootdir: /Users/sei/projects/curl +collected 5 items + +tests/httpd/test_01_basic.py ..... +``` + +Pytest takes arguments. `-v` increases its verbosity and can be used several times. `-k ` can be used to run only matching test cases. The `expr` can be something resembling a python test or just a string that needs to match test cases in their names. + +``` +curl> pytest -vv -k test_01_02 +``` + +runs all test cases that have `test_01_02` in their name. This does not have to be the start of the name. + +Depending on your setup, some test cases may be skipped and appear as `s` in the output. If you run pytest verbose, it will also give you the reason for skipping. + + +# Prerequisites + +You will need: + +1. a recent Python, the `cryptography` module and, of course, `pytest` +2. a apache httpd development version. On Debian/Ubuntu, the package `apache2-dev` has this. +3. a local `curl` project build +3. optionally, a `nghttpx` with HTTP/3 enabled or h3 test cases will be skipped. + +### Configuration + +Via curl's `configure` script you may specify: + + * `--with-test-nghttpx=` if you have nghttpx to use somewhere outside your `$PATH`. + * `--with-test-httpd=` if you have an Apache httpd installed somewhere else. On Debian/Ubuntu it will otherwise look into `/usr/bin` and `/usr/sbin` to find those. + +## Usage Tips + +Several test cases are parameterized, for example with the HTTP version to use. If you want to run a test with a particular protocol only, use a command line like: + +``` +curl> pytest -k "test_02_06 and h2" +``` + +Several test cases can be repeated, they all have the `repeat` parameter. To make this work, you have to start `pytest` in the test directory itself (for some unknown reason). Like in: + +``` +curl/tests/tests-httpd> pytest -k "test_02_06 and h2" --repeat=100 +``` + +which then runs this test case a hundred times. 
In case of flaky tests, you can make pytest stop on the first one with: + +``` +curl/tests/tests-httpd> pytest -k "test_02_06 and h2" --repeat=100 --maxfail=1 +``` + +which allow you to inspect output and log files for the failed run. Speaking of log files, the verbosity of pytest is also used to collect curl trace output. If you specify `-v` three times, the `curl` command is started with `--trace`: + +``` +curl/tests/tests-httpd> pytest -vvv -k "test_02_06 and h2" --repeat=100 --maxfail=1 +``` + +all of curl's output and trace file are found in `tests/tests-httpd/gen/curl`. + +## Writing Tests + +There is a lot of [`pytest` documentation](https://docs.pytest.org/) with examples. No use in repeating that here. Assuming you are somewhat familiar with it, it is useful how *this* general test suite is setup. Especially if you want to add test cases. + +### Servers + +In `conftest.py` 3 "fixtures" are defined that are used by all test cases: + +1. `env`: the test environment. It is an instance of class `testenv/env.py:Env`. It holds all information about paths, availability of features (HTTP/3!), port numbers to use, domains and SSL certificates for those. +2. `httpd`: the Apache httpd instance, configured and started, then stopped at the end of the test suite. It has sites configured for the domains from `env`. It also loads a local module `mod_curltest?` and makes it available in certain locations. (more on mod_curltest below). +3. `nghttpx`: an instance of nghttpx that provides HTTP/3 support. `nghttpx` proxies those requests to the `httpd` server. In a direct mapping, so you may access all the resources under the same path as with HTTP/2. Only the port number used for HTTP/3 requests will be different. + +`pytest` manages these fixture so that they are created once and terminated before exit. This means you can `Ctrl-C` a running pytest and the server will shutdown. Only when you brutally chop its head off, might there be servers left +behind. 
+ +### Test Cases + +Tests making use of these fixtures have them in their parameter list. This tells pytest that a particular test needs them, so it has to create them. Since one can invoke pytest for just a single test, it is important that a test references the ones it needs. + +All test cases start with `test_` in their name. We use a double number scheme to group them. This makes it ease to run only specific tests and also give a short mnemonic to communicate trouble with others in the project. Otherwise you are free to name test cases as you think fitting. + +Tests are grouped thematically in a file with a single Python test class. This is convenient if you need a special "fixture" for several tests. "fixtures" can have "class" scope. + +There is a curl helper class that knows how to invoke curl and interpret its output. Among other things, it does add the local CA to the command line, so that SSL connections to the test servers are verified. Nothing prevents anyone from running curl directly, for specific uses not covered by the `CurlClient` class. + +### mod_curltest + +The module source code is found in `testenv/mod_curltest`. It is compiled using the `apxs` command, commonly provided via the `apache2-dev` package. Compilation is quick and done once at the start of a test run. + +The module adds 2 "handlers" to the Apache server (right now). Handler are pieces of code that receive HTTP requests and generate the response. Those handlers are: + +* `curltest-echo`: hooked up on the path `/curltest/echo`. This one echoes a request and copies all data from the request body to the response body. Useful for simulating upload and checking that the data arrived as intended. +* `curltest-tweak`: hooked up on the path `/curltest/tweak`. This handler is more of a Swiss army knife. It interprets parameters from the URL query string to drive its behavior. + * `status=nnn`: generate a response with HTTP status code `nnn`. 
+ * `chunks=n`: generate `n` chunks of data in the response body, defaults to 3. + * `chunk_size=nnn`: each chunk should contain `nnn` bytes of data. Maximum is 16KB right now. + * `chunkd_delay=duration`: wait `duration` time between writing chunks + * `delay=duration`: wait `duration` time to send the response headers + * `body_error=(timeout|reset)`: produce an error after the first chunk in the response body + * `id=str`: add `str` in the response header `request-id` + +`duration` values are integers, optionally followed by a unit. Units are: + + * `d`: days (probably not useful here) + * `h`: hours + * `mi`: minutes + * `s`: seconds (the default) + * `ms`: milliseconds + +As you can see, `mod_curltest`'s tweak handler allow to simulate many kinds of responses. An example of its use is `test_03_01` where responses are delayed using `chunk_delay`. This gives the response a defined duration and the test uses that to reload `httpd` in the middle of the first request. A graceful reload in httpd lets ongoing requests finish, but will close the connection afterwards and tear down the serving process. The following request need then to open a new connection. This is verified by the test case. + + + + + + + diff --git a/tests/tests-httpd/config.ini.in b/tests/tests-httpd/config.ini.in new file mode 100644 index 00000000000000..1a236947bd65d5 --- /dev/null +++ b/tests/tests-httpd/config.ini.in @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 2008 - 2022, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.se/docs/copyright.html. 
+# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +# SPDX-License-Identifier: curl +# +########################################################################### +# +[global] + +[httpd] +apxs = @APXS@ +httpd = @HTTPD@ +apachectl = @APACHECTL@ + +[test] +http_port = 5001 +https_port = 5002 +h3_port = 5003 + +[nghttpx] +nghttpx = @HTTPD_NGHTTPX@ \ No newline at end of file diff --git a/tests/tests-httpd/conftest.py b/tests/tests-httpd/conftest.py new file mode 100644 index 00000000000000..475e89ca7c9ff1 --- /dev/null +++ b/tests/tests-httpd/conftest.py @@ -0,0 +1,81 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 2008 - 2022, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +# SPDX-License-Identifier: curl +# +########################################################################### +# +import logging +import os +import sys +from typing import Optional + +import pytest + +sys.path.append(os.path.join(os.path.dirname(__file__), '.')) + +from testenv import Env, Nghttpx, Httpd + + +def pytest_report_header(config, startdir): + return f"curl tests-httpd tests" + + +def pytest_addoption(parser): + parser.addoption("--repeat", action="store", type=int, default=1, + help='Number of times to repeat each test') + + +def pytest_generate_tests(metafunc): + if "repeat" in metafunc.fixturenames: + count = int(metafunc.config.getoption("repeat")) + metafunc.fixturenames.append('tmp_ct') + metafunc.parametrize('repeat', range(count)) + + +@pytest.fixture(scope="package") +def env(pytestconfig) -> Env: + env = Env(pytestconfig=pytestconfig) + level = logging.DEBUG if env.verbose > 0 else logging.INFO + logging.getLogger('').setLevel(level=level) + env.setup() + return env + + +@pytest.fixture(scope='package') +def httpd(env) -> Httpd: + httpd = Httpd(env=env) + assert httpd.exists(), f'httpd not found: {env.httpd}' + httpd.clear_logs() + assert httpd.start() + yield httpd + httpd.stop() + + +@pytest.fixture(scope='package') +def nghttpx(env) -> Optional[Nghttpx]: + if env.have_h3_server(): + nghttpx = Nghttpx(env=env) + nghttpx.clear_logs() + assert nghttpx.start() + yield nghttpx + nghttpx.stop() + return None + diff --git a/tests/tests-httpd/test_01_basic.py b/tests/tests-httpd/test_01_basic.py new file mode 100644 index 00000000000000..4154d13f0ffa3d --- /dev/null +++ b/tests/tests-httpd/test_01_basic.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 2008 - 2022, Daniel Stenberg, , et al. 
+# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +# SPDX-License-Identifier: curl +# +########################################################################### +# +import logging +import pytest + +from testenv import Env +from testenv import CurlClient + + +log = logging.getLogger(__name__) + + +@pytest.mark.skipif(condition=Env.setup_incomplete(), + reason=f"missing: {Env.incomplete_reason()}") +class TestBasic: + + # simple http: GET + def test_01_01_http_get(self, env: Env, httpd): + curl = CurlClient(env=env) + url = f'http://{env.domain1}:{env.http_port}/data.json' + r = curl.http_get(url=url) + assert r.exit_code == 0 + assert r.response['status'] == 200 + assert r.json['server'] == env.domain1 + + # simple https: GET, any http version + def test_01_02_https_get(self, env: Env, httpd): + curl = CurlClient(env=env) + url = f'https://{env.domain1}:{env.https_port}/data.json' + r = curl.http_get(url=url) + assert r.exit_code == 0 + assert r.response['status'] == 200 + assert r.json['server'] == env.domain1 + + # simple https: GET, h2 wanted and got + def test_01_02_h2_get(self, env: Env, httpd): + curl = CurlClient(env=env) + url = f'https://{env.domain1}:{env.https_port}/data.json' + r = curl.http_get(url=url, extra_args=['--http2']) + assert r.exit_code == 0 + assert r.response['status'] == 200 + assert r.response['protocol'] == 'HTTP/2' + assert r.json['server'] == env.domain1 + + # simple https: GET, h2 unsupported, fallback to h1 + def test_01_02_h2_unsupported(self, env: Env, 
httpd): + curl = CurlClient(env=env) + url = f'https://{env.domain2}:{env.https_port}/data.json' + r = curl.http_get(url=url, extra_args=['--http2']) + assert r.exit_code == 0 + assert r.response['status'] == 200 + assert r.response['protocol'] == 'HTTP/1.1' + assert r.json['server'] == env.domain2 + + # simple h3: GET, want h3 and get it + @pytest.mark.skipif(condition=not Env.have_h3_curl(), reason="no h3 curl") + @pytest.mark.skipif(condition=not Env.have_h3_server(), reason="no h3 server") + def test_01_03_h3_get(self, env: Env, httpd, nghttpx): + curl = CurlClient(env=env) + url = f'https://{env.domain1}:{env.h3_port}/data.json' + r = curl.http_get(url=url, extra_args=['--http3']) + assert r.exit_code == 0, f'{r}' + assert r.response['status'] == 200 + assert r.response['protocol'] == 'HTTP/3' + assert r.json['server'] == env.domain1 diff --git a/tests/tests-httpd/test_02_download.py b/tests/tests-httpd/test_02_download.py new file mode 100644 index 00000000000000..5a6580bdb005cc --- /dev/null +++ b/tests/tests-httpd/test_02_download.py @@ -0,0 +1,173 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 2008 - 2022, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +# SPDX-License-Identifier: curl +# +########################################################################### +# +import json +import logging +from typing import Optional +import pytest + +from testenv import Env, CurlClient, ExecResult + + +log = logging.getLogger(__name__) + + +@pytest.mark.skipif(condition=Env.setup_incomplete(), + reason=f"missing: {Env.incomplete_reason()}") +class TestDownload: + + # download 1 file + @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) + def test_02_01_download_1(self, env: Env, httpd, nghttpx, repeat, proto): + if proto == 'h3' and not env.have_h3(): + pytest.skip("h3 not supported") + curl = CurlClient(env=env) + url = f'https://{env.authority_for(env.domain1, proto)}/data.json' + r = curl.http_download(urls=[url], alpn_proto=proto) + assert r.exit_code == 0, f'{r}' + r.check_responses(count=1, exp_status=200) + + # download 2 files + @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) + def test_02_02_download_2(self, env: Env, httpd, nghttpx, repeat, proto): + if proto == 'h3' and not env.have_h3(): + pytest.skip("h3 not supported") + curl = CurlClient(env=env) + url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]' + r = curl.http_download(urls=[url], alpn_proto=proto) + assert r.exit_code == 0 + r.check_responses(count=2, exp_status=200) + + # download 100 files sequentially + @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) + def test_02_03_download_100_sequential(self, env: Env, + httpd, nghttpx, repeat, proto): + if proto == 'h3' and not env.have_h3(): + pytest.skip("h3 not supported") + curl = CurlClient(env=env) + urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-99]' + r = curl.http_download(urls=[urln], alpn_proto=proto) + assert r.exit_code == 0 + r.check_responses(count=100, exp_status=200) + assert len(r.stats) == 100, f'{r.stats}' + # http/1.1 sequential transfers will open 1 connection + assert r.total_connects == 1 + + # download 100 
files parallel + @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) + def test_02_04_download_100_parallel(self, env: Env, + httpd, nghttpx, repeat, proto): + if proto == 'h3' and not env.have_h3(): + pytest.skip("h3 not supported") + curl = CurlClient(env=env) + urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-99]' + r = curl.http_download(urls=[urln], alpn_proto=proto, + extra_args=['--parallel']) + assert r.exit_code == 0 + r.check_responses(count=100, exp_status=200) + if proto == 'http/1.1': + # http/1.1 parallel transfers will open multiple connections + assert r.total_connects > 1 + else: + # http2 parallel transfers will use one connection (common limit is 100) + assert r.total_connects == 1 + + # download 500 files sequential + @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) + def test_02_05_download_500_sequential(self, env: Env, + httpd, nghttpx, repeat, proto): + if proto == 'h3' and not env.have_h3(): + pytest.skip("h3 not supported") + curl = CurlClient(env=env) + urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-499]' + r = curl.http_download(urls=[urln], alpn_proto=proto) + assert r.exit_code == 0 + r.check_responses(count=500, exp_status=200) + if proto == 'http/1.1': + # http/1.1 parallel transfers will open multiple connections + assert r.total_connects > 1 + else: + # http2 parallel transfers will use one connection (common limit is 100) + assert r.total_connects == 1 + + # download 500 files parallel (default max of 100) + @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) + def test_02_06_download_500_parallel(self, env: Env, + httpd, nghttpx, repeat, proto): + if proto == 'h3' and not env.have_h3(): + pytest.skip("h3 not supported") + curl = CurlClient(env=env) + urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[000-499]' + r = curl.http_download(urls=[urln], alpn_proto=proto, + extra_args=['--parallel']) + assert r.exit_code == 0 + 
r.check_responses(count=500, exp_status=200) + if proto == 'http/1.1': + # http/1.1 parallel transfers will open multiple connections + assert r.total_connects > 1 + else: + # http2 parallel transfers will use one connection (common limit is 100) + assert r.total_connects == 1 + + # download 500 files parallel (max of 200), only h2 + @pytest.mark.skip(reason="TODO: we get 101 connections created. PIPEWAIT needs a fix") + @pytest.mark.parametrize("proto", ['h2', 'h3']) + def test_02_07_download_500_parallel(self, env: Env, + httpd, nghttpx, repeat, proto): + if proto == 'h3' and not env.have_h3(): + pytest.skip("h3 not supported") + curl = CurlClient(env=env) + urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-499]' + r = curl.http_download(urls=[urln], alpn_proto=proto, + with_stats=False, extra_args=[ + '--parallel', '--parallel-max', '200' + ]) + assert r.exit_code == 0, f'{r}' + r.check_responses(count=500, exp_status=200) + # http2 should now use 2 connections, at most 5 + assert r.total_connects <= 5, "h2 should use fewer connections here" + + def check_response(self, r: ExecResult, count: int, + exp_status: Optional[int] = None): + if len(r.responses) != count: + seen_queries = [] + for idx, resp in enumerate(r.responses): + assert resp['status'] == 200, f'response #{idx} status: {resp["status"]}' + if 'rquery' not in resp['header']: + log.error(f'response #{idx} missing "rquery": {resp["header"]}') + seen_queries.append(int(resp['header']['rquery'])) + for i in range(0,count-1): + if i not in seen_queries: + log.error(f'response for query {i} missing') + if r.with_stats and len(r.stats) == count: + log.error(f'got all {count} stats, though') + assert len(r.responses) == count + if exp_status is not None: + for idx, x in enumerate(r.responses): + assert x['status'] == exp_status, \ + f'response #{idx} unexpectedstatus: {x["status"]}' + if r.with_stats: + assert len(r.stats) == count, f'{r}' diff --git a/tests/tests-httpd/test_03_goaway.py 
b/tests/tests-httpd/test_03_goaway.py new file mode 100644 index 00000000000000..db8be80f1e0c04 --- /dev/null +++ b/tests/tests-httpd/test_03_goaway.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 2008 - 2022, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +# SPDX-License-Identifier: curl +# +########################################################################### +# +import json +import logging +import time +from datetime import timedelta +from threading import Thread +from typing import Optional +import pytest + +from testenv import Env, CurlClient, ExecResult + + +log = logging.getLogger(__name__) + + +@pytest.mark.skipif(condition=Env.setup_incomplete(), + reason=f"missing: {Env.incomplete_reason()}") +class TestGoAway: + + # download files sequentially with delay, reload server for GOAWAY + def test_03_01_h2_goaway(self, env: Env, httpd, nghttpx, repeat): + proto = 'h2' + count = 3 + self.r = None + def long_run(): + curl = CurlClient(env=env) + # send 10 chunks of 1024 bytes in a response body with 100ms delay in between + urln = f'https://{env.authority_for(env.domain1, proto)}' \ + f'/curltest/tweak?id=[0-{count - 1}]'\ + '&chunks=10&chunk_size=1024&chunk_delay=100ms' + self.r = curl.http_download(urls=[urln], alpn_proto=proto) + + t = Thread(target=long_run) + t.start() + # each request will take a second, reload the server in the middle + # of the first one. 
+ time.sleep(1.5) + assert httpd.reload() + t.join() + r: ExecResult = self.r + assert r.exit_code == 0, f'{r}' + r.check_responses(count=count, exp_status=200) + assert len(r.stats) == count, f'{r.stats}' + # reload will shut down the connection gracefully with GOAWAY + # we expect to see a second connection opened afterwards + assert r.total_connects == 2 + for idx, s in enumerate(r.stats): + if s['num_connects'] > 0: + log.debug(f'request {idx} connected') + # this should take `count` seconds to retrieve + assert r.duration >= timedelta(seconds=count) + + # download files sequentially with delay, reload server for GOAWAY + @pytest.mark.skipif(condition=not Env.have_h3_server(), reason="no h3 server") + @pytest.mark.skipif(condition=True, reason="2nd and 3rd request sometimes fail") + def test_03_02_h3_goaway(self, env: Env, httpd, nghttpx, repeat): + proto = 'h3' + count = 3 + self.r = None + def long_run(): + curl = CurlClient(env=env) + # send 10 chunks of 1024 bytes in a response body with 100ms delay in between + urln = f'https://{env.authority_for(env.domain1, proto)}' \ + f'/curltest/tweak?id=[0-{count - 1}]'\ + '&chunks=10&chunk_size=1024&chunk_delay=100ms' + self.r = curl.http_download(urls=[urln], alpn_proto=proto) + + t = Thread(target=long_run) + t.start() + # each request will take a second, reload the server in the middle + # of the first one. 
+ time.sleep(1.5) + assert nghttpx.reload(timeout=timedelta(seconds=5)) + t.join() + r: ExecResult = self.r + assert r.exit_code == 0, f'{r}' + r.check_responses(count=count, exp_status=200) + assert len(r.stats) == count, f'{r.stats}' + # reload will shut down the connection gracefully with GOAWAY + # we expect to see a second connection opened afterwards + assert r.total_connects == 2 + for idx, s in enumerate(r.stats): + if s['num_connects'] > 0: + log.debug(f'request {idx} connected') + # this should take `count` seconds to retrieve + assert r.duration >= timedelta(seconds=count) + + diff --git a/tests/tests-httpd/test_04_stuttered.py b/tests/tests-httpd/test_04_stuttered.py new file mode 100644 index 00000000000000..622254bb3b1cb5 --- /dev/null +++ b/tests/tests-httpd/test_04_stuttered.py @@ -0,0 +1,143 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 2008 - 2022, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +# SPDX-License-Identifier: curl +# +########################################################################### +# +import logging +from typing import Tuple, List, Dict +import pytest + +from testenv import Env, CurlClient + + +log = logging.getLogger(__name__) + + +@pytest.mark.skipif(condition=Env.setup_incomplete(), + reason=f"missing: {Env.incomplete_reason()}") +class TestStuttered: + + # download 1 file, check that delayed response works in general + @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) + def test_04_01_download_1(self, env: Env, httpd, nghttpx, repeat, + proto): + if proto == 'h3' and not env.have_h3(): + pytest.skip("h3 not supported") + count = 1 + curl = CurlClient(env=env) + urln = f'https://{env.authority_for(env.domain1, proto)}' \ + f'/curltest/tweak?id=[0-{count - 1}]'\ + '&chunks=100&chunk_size=100&chunk_delay=10ms' + r = curl.http_download(urls=[urln], alpn_proto=proto) + assert r.exit_code == 0, f'{r}' + r.check_responses(count=1, exp_status=200) + + # download 50 files in 100 chunks of 100 bytes with 10ms delay between + # prepend 100 file requests to warm up connection processing limits + # (Apache2 increases # of parallel processed requests after successes) + @pytest.mark.parametrize("proto", ['h2', 'h3']) + def test_04_02_100_100_10(self, env: Env, + httpd, nghttpx, repeat, proto): + if proto == 'h3' and not env.have_h3(): + pytest.skip("h3 not supported") + count = 50 + warmups = 100 + curl = CurlClient(env=env) + url1 = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]' + urln = f'https://{env.authority_for(env.domain1, proto)}' \ + f'/curltest/tweak?id=[0-{count-1}]'\ + '&chunks=100&chunk_size=100&chunk_delay=10ms' + r = curl.http_download(urls=[url1, urln], alpn_proto=proto, + extra_args=['--parallel']) + assert r.exit_code == 0, f'{r}' + r.check_responses(count=warmups+count, exp_status=200) + assert r.total_connects == 1 + t_avg, i_min, t_min, i_max, t_max = 
self.stats_spread(r.stats[warmups:], 'time_total') + assert t_max < (3 * t_min) and t_min < 2, \ + f'avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]' + + # download 50 files in 1000 chunks of 10 bytes with 100us delay between + # prepend 100 file requests to warm up connection processing limits + # (Apache2 increases # of parallel processed requests after successes) + @pytest.mark.parametrize("proto", ['h2', 'h3']) + def test_04_03_1000_10_1(self, env: Env, httpd, nghttpx, repeat, proto): + if proto == 'h3' and not env.have_h3(): + pytest.skip("h3 not supported") + count = 50 + warmups = 100 + curl = CurlClient(env=env) + url1 = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]' + urln = f'https://{env.authority_for(env.domain1, proto)}' \ + f'/curltest/tweak?id=[0-{count - 1}]'\ + '&chunks=1000&chunk_size=10&chunk_delay=100us' + r = curl.http_download(urls=[url1, urln], alpn_proto=proto, + extra_args=['--parallel']) + assert r.exit_code == 0 + r.check_responses(count=warmups+count, exp_status=200) + assert r.total_connects == 1 + t_avg, i_min, t_min, i_max, t_max = self.stats_spread(r.stats[warmups:], 'time_total') + assert t_max < (3 * t_min) and t_min < 2.5, \ + f'avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]' + + # download 50 files in 10000 chunks of 1 byte with 50us delay between + # prepend 100 file requests to warm up connection processing limits + # (Apache2 increases # of parallel processed requests after successes) + @pytest.mark.parametrize("proto", ['h2', 'h3']) + def test_04_04_1000_10_1(self, env: Env, httpd, nghttpx, repeat, proto): + if proto == 'h3' and not env.have_h3(): + pytest.skip("h3 not supported") + if proto == 'h2': + pytest.skip("h2 shows overly long request times") + count = 50 + warmups = 100 + curl = CurlClient(env=env) + url1 = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]' + urln = f'https://{env.authority_for(env.domain1, proto)}' \ + 
f'/curltest/tweak?id=[0-{count - 1}]'\ + '&chunks=10000&chunk_size=1&chunk_delay=50us' + r = curl.http_download(urls=[url1, urln], alpn_proto=proto, + extra_args=['--parallel']) + assert r.exit_code == 0 + r.check_responses(count=warmups+count, exp_status=200) + assert r.total_connects == 1 + t_avg, i_min, t_min, i_max, t_max = self.stats_spread(r.stats[warmups:], 'time_total') + assert t_max < (3 * t_min) and t_min < 3, \ + f'avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]' + + def stats_spread(self, stats: List[Dict], key: str) -> Tuple[float, int, float, int, float]: + stotals = 0.0 + s_min = 100.0 + i_min = -1 + s_max = 0.0 + i_max = -1 + for idx, s in enumerate(stats): + val = float(s[key]) + stotals += val + if val > s_max: + s_max = val + i_max = idx + if val < s_min: + s_min = val + i_min = idx + return stotals/len(stats), i_min, s_min, i_max, s_max diff --git a/tests/tests-httpd/test_05_errors.py b/tests/tests-httpd/test_05_errors.py new file mode 100644 index 00000000000000..5a56683eb5ad9d --- /dev/null +++ b/tests/tests-httpd/test_05_errors.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 2008 - 2022, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +# SPDX-License-Identifier: curl +# +########################################################################### +# +import json +import logging +from typing import Optional, Tuple, List, Dict +import pytest + +from testenv import Env, CurlClient, ExecResult + + +log = logging.getLogger(__name__) + + +@pytest.mark.skipif(condition=Env.setup_incomplete(), + reason=f"missing: {Env.incomplete_reason()}") +class TestErrors: + + # download 1 file, check that we get CURLE_PARTIAL_FILE + @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) + def test_05_01_partial_1(self, env: Env, httpd, nghttpx, repeat, + proto): + if proto == 'h3' and not env.have_h3(): + pytest.skip("h3 not supported") + if proto == 'h2': # TODO, fix error code in curl + pytest.skip("h2 reports exitcode 16(CURLE_HTTP2)") + if proto == 'h3': # TODO, fix error code in curl + pytest.skip("h3 reports exitcode 95(CURLE_HTTP3)") + count = 1 + curl = CurlClient(env=env) + urln = f'https://{env.authority_for(env.domain1, proto)}' \ + f'/curltest/tweak?id=[0-{count - 1}]'\ + '&chunks=3&chunk_size=16000&body_error=reset' + r = curl.http_download(urls=[urln], alpn_proto=proto) + assert r.exit_code != 0, f'{r}' + invalid_stats = [] + for idx, s in enumerate(r.stats): + if 'exitcode' not in s or s['exitcode'] != 18: + invalid_stats.append(f'request {idx} exit with {s["exitcode"]}') + assert len(invalid_stats) == 0, f'failed: {invalid_stats}' + + # download 20 file, check that we get CURLE_PARTIAL_FILE for all + @pytest.mark.parametrize("proto", ['h2', 'h3']) + def test_05_02_partial_20(self, env: Env, httpd, nghttpx, repeat, + proto): + if proto == 'h3' and not env.have_h3(): + pytest.skip("h3 not supported") + if proto == 'h2': # TODO, fix error code in curl + pytest.skip("h2 reports exitcode 16(CURLE_HTTP2)") + if proto == 'h3': # TODO, fix error code in curl + pytest.skip("h3 reports exitcode 95(CURLE_HTTP3) and takes a long time") + count = 20 + curl = CurlClient(env=env) + urln = 
f'https://{env.authority_for(env.domain1, proto)}' \ + f'/curltest/tweak?id=[0-{count - 1}]'\ + '&chunks=3&chunk_size=16000&body_error=reset' + r = curl.http_download(urls=[urln], alpn_proto=proto) + assert r.exit_code != 0, f'{r}' + assert len(r.stats) == count, f'did not get all stats: {r}' + invalid_stats = [] + for idx, s in enumerate(r.stats): + if 'exitcode' not in s or s['exitcode'] != 18: + invalid_stats.append(f'request {idx} exit with {s["exitcode"]}') + assert len(invalid_stats) == 0, f'failed: {invalid_stats}' diff --git a/tests/tests-httpd/testenv/__init__.py b/tests/tests-httpd/testenv/__init__.py new file mode 100644 index 00000000000000..7ecf6c97f2a5de --- /dev/null +++ b/tests/tests-httpd/testenv/__init__.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 2008 - 2022, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +# SPDX-License-Identifier: curl +# +########################################################################### +# +from .env import Env +from .certs import TestCA, Credentials +from .httpd import Httpd +from .curl import CurlClient, ExecResult +from .nghttpx import Nghttpx \ No newline at end of file diff --git a/tests/tests-httpd/testenv/certs.py b/tests/tests-httpd/testenv/certs.py new file mode 100644 index 00000000000000..1f29cefa0a6d8f --- /dev/null +++ b/tests/tests-httpd/testenv/certs.py @@ -0,0 +1,528 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 2008 - 2022, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +# SPDX-License-Identifier: curl +# +########################################################################### +# +import os +import re +from datetime import timedelta, datetime +from typing import List, Any, Optional + +from cryptography import x509 +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ec, rsa +from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey +from cryptography.hazmat.primitives.serialization import Encoding, PrivateFormat, NoEncryption, load_pem_private_key +from cryptography.x509 import ExtendedKeyUsageOID, NameOID + + +EC_SUPPORTED = {} +EC_SUPPORTED.update([(curve.name.upper(), curve) for curve in [ + ec.SECP192R1, + ec.SECP224R1, + ec.SECP256R1, + ec.SECP384R1, +]]) + + +def _private_key(key_type): + if isinstance(key_type, str): + key_type = key_type.upper() + m = re.match(r'^(RSA)?(\d+)$', key_type) + if m: + key_type = int(m.group(2)) + + if isinstance(key_type, int): + return rsa.generate_private_key( + public_exponent=65537, + key_size=key_type, + backend=default_backend() + ) + if not isinstance(key_type, ec.EllipticCurve) and key_type in EC_SUPPORTED: + key_type = EC_SUPPORTED[key_type] + return ec.generate_private_key( + curve=key_type, + backend=default_backend() + ) + + +class CertificateSpec: + + def __init__(self, name: Optional[str] = None, + domains: Optional[List[str]] = None, + email: Optional[str] = None, + key_type: Optional[str] = None, + single_file: bool = False, + valid_from: timedelta = timedelta(days=-1), + valid_to: timedelta = timedelta(days=89), + client: bool = False, + sub_specs: Optional[List['CertificateSpec']] = None): + self._name = name + self.domains = domains + self.client = client + self.email = email + self.key_type = key_type + self.single_file = single_file + self.valid_from = valid_from 
+ self.valid_to = valid_to + self.sub_specs = sub_specs + + @property + def name(self) -> Optional[str]: + if self._name: + return self._name + elif self.domains: + return self.domains[0] + return None + + @property + def type(self) -> Optional[str]: + if self.domains and len(self.domains): + return "server" + elif self.client: + return "client" + elif self.name: + return "ca" + return None + + +class Credentials: + + def __init__(self, + name: str, + cert: Any, + pkey: Any, + issuer: Optional['Credentials'] = None): + self._name = name + self._cert = cert + self._pkey = pkey + self._issuer = issuer + self._cert_file = None + self._pkey_file = None + self._store = None + + @property + def name(self) -> str: + return self._name + + @property + def subject(self) -> x509.Name: + return self._cert.subject + + @property + def key_type(self): + if isinstance(self._pkey, RSAPrivateKey): + return f"rsa{self._pkey.key_size}" + elif isinstance(self._pkey, EllipticCurvePrivateKey): + return f"{self._pkey.curve.name}" + else: + raise Exception(f"unknown key type: {self._pkey}") + + @property + def private_key(self) -> Any: + return self._pkey + + @property + def certificate(self) -> Any: + return self._cert + + @property + def cert_pem(self) -> bytes: + return self._cert.public_bytes(Encoding.PEM) + + @property + def pkey_pem(self) -> bytes: + return self._pkey.private_bytes( + Encoding.PEM, + PrivateFormat.TraditionalOpenSSL if self.key_type.startswith('rsa') else PrivateFormat.PKCS8, + NoEncryption()) + + @property + def issuer(self) -> Optional['Credentials']: + return self._issuer + + def set_store(self, store: 'CertStore'): + self._store = store + + def set_files(self, cert_file: str, pkey_file: Optional[str] = None, + combined_file: Optional[str] = None): + self._cert_file = cert_file + self._pkey_file = pkey_file + self._combined_file = combined_file + + @property + def cert_file(self) -> str: + return self._cert_file + + @property + def pkey_file(self) -> 
Optional[str]: + return self._pkey_file + + @property + def combined_file(self) -> Optional[str]: + return self._combined_file + + def get_first(self, name) -> Optional['Credentials']: + creds = self._store.get_credentials_for_name(name) if self._store else [] + return creds[0] if len(creds) else None + + def get_credentials_for_name(self, name) -> List['Credentials']: + return self._store.get_credentials_for_name(name) if self._store else [] + + def issue_certs(self, specs: List[CertificateSpec], + chain: Optional[List['Credentials']] = None) -> List['Credentials']: + return [self.issue_cert(spec=spec, chain=chain) for spec in specs] + + def issue_cert(self, spec: CertificateSpec, + chain: Optional[List['Credentials']] = None) -> 'Credentials': + key_type = spec.key_type if spec.key_type else self.key_type + creds = None + if self._store: + creds = self._store.load_credentials( + name=spec.name, key_type=key_type, single_file=spec.single_file, issuer=self) + if creds is None: + creds = TestCA.create_credentials(spec=spec, issuer=self, key_type=key_type, + valid_from=spec.valid_from, valid_to=spec.valid_to) + if self._store: + self._store.save(creds, single_file=spec.single_file) + if spec.type == "ca": + self._store.save_chain(creds, "ca", with_root=True) + + if spec.sub_specs: + if self._store: + sub_store = CertStore(fpath=os.path.join(self._store.path, creds.name)) + creds.set_store(sub_store) + subchain = chain.copy() if chain else [] + subchain.append(self) + creds.issue_certs(spec.sub_specs, chain=subchain) + return creds + + +class CertStore: + + def __init__(self, fpath: str): + self._store_dir = fpath + if not os.path.exists(self._store_dir): + os.makedirs(self._store_dir) + self._creds_by_name = {} + + @property + def path(self) -> str: + return self._store_dir + + def save(self, creds: Credentials, name: Optional[str] = None, + chain: Optional[List[Credentials]] = None, + single_file: bool = False) -> None: + name = name if name is not None else 
creds.name + cert_file = self.get_cert_file(name=name, key_type=creds.key_type) + pkey_file = self.get_pkey_file(name=name, key_type=creds.key_type) + comb_file = self.get_combined_file(name=name, key_type=creds.key_type) + if single_file: + pkey_file = None + with open(cert_file, "wb") as fd: + fd.write(creds.cert_pem) + if chain: + for c in chain: + fd.write(c.cert_pem) + if pkey_file is None: + fd.write(creds.pkey_pem) + if pkey_file is not None: + with open(pkey_file, "wb") as fd: + fd.write(creds.pkey_pem) + with open(comb_file, "wb") as fd: + fd.write(creds.cert_pem) + if chain: + for c in chain: + fd.write(c.cert_pem) + fd.write(creds.pkey_pem) + creds.set_files(cert_file, pkey_file, comb_file) + self._add_credentials(name, creds) + + def save_chain(self, creds: Credentials, infix: str, with_root=False): + name = creds.name + chain = [creds] + while creds.issuer is not None: + creds = creds.issuer + chain.append(creds) + if not with_root and len(chain) > 1: + chain = chain[:-1] + chain_file = os.path.join(self._store_dir, f'{name}-{infix}.pem') + with open(chain_file, "wb") as fd: + for c in chain: + fd.write(c.cert_pem) + + def _add_credentials(self, name: str, creds: Credentials): + if name not in self._creds_by_name: + self._creds_by_name[name] = [] + self._creds_by_name[name].append(creds) + + def get_credentials_for_name(self, name) -> List[Credentials]: + return self._creds_by_name[name] if name in self._creds_by_name else [] + + def get_cert_file(self, name: str, key_type=None) -> str: + key_infix = ".{0}".format(key_type) if key_type is not None else "" + return os.path.join(self._store_dir, f'{name}{key_infix}.cert.pem') + + def get_pkey_file(self, name: str, key_type=None) -> str: + key_infix = ".{0}".format(key_type) if key_type is not None else "" + return os.path.join(self._store_dir, f'{name}{key_infix}.pkey.pem') + + def get_combined_file(self, name: str, key_type=None) -> str: + return os.path.join(self._store_dir, f'{name}.pem') + + def 
load_pem_cert(self, fpath: str) -> x509.Certificate: + with open(fpath) as fd: + return x509.load_pem_x509_certificate("".join(fd.readlines()).encode()) + + def load_pem_pkey(self, fpath: str): + with open(fpath) as fd: + return load_pem_private_key("".join(fd.readlines()).encode(), password=None) + + def load_credentials(self, name: str, key_type=None, + single_file: bool = False, + issuer: Optional[Credentials] = None): + cert_file = self.get_cert_file(name=name, key_type=key_type) + pkey_file = cert_file if single_file else self.get_pkey_file(name=name, key_type=key_type) + comb_file = self.get_combined_file(name=name, key_type=key_type) + if os.path.isfile(cert_file) and os.path.isfile(pkey_file): + cert = self.load_pem_cert(cert_file) + pkey = self.load_pem_pkey(pkey_file) + creds = Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer) + creds.set_store(self) + creds.set_files(cert_file, pkey_file, comb_file) + self._add_credentials(name, creds) + return creds + return None + + +class TestCA: + + @classmethod + def create_root(cls, name: str, store_dir: str, key_type: str = "rsa2048") -> Credentials: + store = CertStore(fpath=store_dir) + creds = store.load_credentials(name="ca", key_type=key_type, issuer=None) + if creds is None: + creds = TestCA._make_ca_credentials(name=name, key_type=key_type) + store.save(creds, name="ca") + creds.set_store(store) + return creds + + @staticmethod + def create_credentials(spec: CertificateSpec, issuer: Credentials, key_type: Any, + valid_from: timedelta = timedelta(days=-1), + valid_to: timedelta = timedelta(days=89), + ) -> Credentials: + """Create a certificate signed by this CA for the given domains. 
+        :returns: the certificate and private key PEM file paths
+        """
+        if spec.domains and len(spec.domains):
+            creds = TestCA._make_server_credentials(name=spec.name, domains=spec.domains,
+                                                    issuer=issuer, valid_from=valid_from,
+                                                    valid_to=valid_to, key_type=key_type)
+        elif spec.client:
+            creds = TestCA._make_client_credentials(name=spec.name, issuer=issuer,
+                                                    email=spec.email, valid_from=valid_from,
+                                                    valid_to=valid_to, key_type=key_type)
+        elif spec.name:
+            creds = TestCA._make_ca_credentials(name=spec.name, issuer=issuer,
+                                                valid_from=valid_from, valid_to=valid_to,
+                                                key_type=key_type)
+        else:
+            raise Exception(f"unrecognized certificate specification: {spec}")
+        return creds
+
+    @staticmethod
+    def _make_x509_name(org_name: str = None, common_name: str = None, parent: x509.Name = None) -> x509.Name:
+        name_pieces = []
+        if org_name:
+            oid = NameOID.ORGANIZATIONAL_UNIT_NAME if parent else NameOID.ORGANIZATION_NAME
+            name_pieces.append(x509.NameAttribute(oid, org_name))
+        elif common_name:
+            name_pieces.append(x509.NameAttribute(NameOID.COMMON_NAME, common_name))
+        if parent:
+            name_pieces.extend([rdn for rdn in parent])
+        return x509.Name(name_pieces)
+
+    @staticmethod
+    def _make_csr(
+            subject: x509.Name,
+            pkey: Any,
+            issuer_subject: Optional[Credentials],
+            valid_from_delta: timedelta = None,
+            valid_until_delta: timedelta = None
+    ):
+        pubkey = pkey.public_key()
+        issuer_subject = issuer_subject if issuer_subject is not None else subject
+
+        valid_from = datetime.now()
+        if valid_from_delta is not None:
+            valid_from += valid_from_delta
+        valid_until = datetime.now()
+        if valid_until_delta is not None:
+            valid_until += valid_until_delta
+
+        return (
+            x509.CertificateBuilder()
+            .subject_name(subject)
+            .issuer_name(issuer_subject)
+            .public_key(pubkey)
+            .not_valid_before(valid_from)
+            .not_valid_after(valid_until)
+            .serial_number(x509.random_serial_number())
+            .add_extension(
+                x509.SubjectKeyIdentifier.from_public_key(pubkey),
+                critical=False,
+            )
+        )
+
+ @staticmethod + def _add_ca_usages(csr: Any) -> Any: + return csr.add_extension( + x509.BasicConstraints(ca=True, path_length=9), + critical=True, + ).add_extension( + x509.KeyUsage( + digital_signature=True, + content_commitment=False, + key_encipherment=False, + data_encipherment=False, + key_agreement=False, + key_cert_sign=True, + crl_sign=True, + encipher_only=False, + decipher_only=False), + critical=True + ).add_extension( + x509.ExtendedKeyUsage([ + ExtendedKeyUsageOID.CLIENT_AUTH, + ExtendedKeyUsageOID.SERVER_AUTH, + ExtendedKeyUsageOID.CODE_SIGNING, + ]), + critical=True + ) + + @staticmethod + def _add_leaf_usages(csr: Any, domains: List[str], issuer: Credentials) -> Any: + return csr.add_extension( + x509.BasicConstraints(ca=False, path_length=None), + critical=True, + ).add_extension( + x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier( + issuer.certificate.extensions.get_extension_for_class( + x509.SubjectKeyIdentifier).value), + critical=False + ).add_extension( + x509.SubjectAlternativeName([x509.DNSName(domain) for domain in domains]), + critical=True, + ).add_extension( + x509.ExtendedKeyUsage([ + ExtendedKeyUsageOID.SERVER_AUTH, + ]), + critical=True + ) + + @staticmethod + def _add_client_usages(csr: Any, issuer: Credentials, rfc82name: str = None) -> Any: + cert = csr.add_extension( + x509.BasicConstraints(ca=False, path_length=None), + critical=True, + ).add_extension( + x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier( + issuer.certificate.extensions.get_extension_for_class( + x509.SubjectKeyIdentifier).value), + critical=False + ) + if rfc82name: + cert.add_extension( + x509.SubjectAlternativeName([x509.RFC822Name(rfc82name)]), + critical=True, + ) + cert.add_extension( + x509.ExtendedKeyUsage([ + ExtendedKeyUsageOID.CLIENT_AUTH, + ]), + critical=True + ) + return cert + + @staticmethod + def _make_ca_credentials(name, key_type: Any, + issuer: Credentials = None, + valid_from: timedelta = timedelta(days=-1), + 
valid_to: timedelta = timedelta(days=89), + ) -> Credentials: + pkey = _private_key(key_type=key_type) + if issuer is not None: + issuer_subject = issuer.certificate.subject + issuer_key = issuer.private_key + else: + issuer_subject = None + issuer_key = pkey + subject = TestCA._make_x509_name(org_name=name, parent=issuer.subject if issuer else None) + csr = TestCA._make_csr(subject=subject, + issuer_subject=issuer_subject, pkey=pkey, + valid_from_delta=valid_from, valid_until_delta=valid_to) + csr = TestCA._add_ca_usages(csr) + cert = csr.sign(private_key=issuer_key, + algorithm=hashes.SHA256(), + backend=default_backend()) + return Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer) + + @staticmethod + def _make_server_credentials(name: str, domains: List[str], issuer: Credentials, + key_type: Any, + valid_from: timedelta = timedelta(days=-1), + valid_to: timedelta = timedelta(days=89), + ) -> Credentials: + name = name + pkey = _private_key(key_type=key_type) + subject = TestCA._make_x509_name(common_name=name, parent=issuer.subject) + csr = TestCA._make_csr(subject=subject, + issuer_subject=issuer.certificate.subject, pkey=pkey, + valid_from_delta=valid_from, valid_until_delta=valid_to) + csr = TestCA._add_leaf_usages(csr, domains=domains, issuer=issuer) + cert = csr.sign(private_key=issuer.private_key, + algorithm=hashes.SHA256(), + backend=default_backend()) + return Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer) + + @staticmethod + def _make_client_credentials(name: str, + issuer: Credentials, email: Optional[str], + key_type: Any, + valid_from: timedelta = timedelta(days=-1), + valid_to: timedelta = timedelta(days=89), + ) -> Credentials: + pkey = _private_key(key_type=key_type) + subject = TestCA._make_x509_name(common_name=name, parent=issuer.subject) + csr = TestCA._make_csr(subject=subject, + issuer_subject=issuer.certificate.subject, pkey=pkey, + valid_from_delta=valid_from, valid_until_delta=valid_to) + csr = 
TestCA._add_client_usages(csr, issuer=issuer, rfc82name=email) + cert = csr.sign(private_key=issuer.private_key, + algorithm=hashes.SHA256(), + backend=default_backend()) + return Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer) diff --git a/tests/tests-httpd/testenv/curl.py b/tests/tests-httpd/testenv/curl.py new file mode 100644 index 00000000000000..1000925d7b991a --- /dev/null +++ b/tests/tests-httpd/testenv/curl.py @@ -0,0 +1,360 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 2008 - 2022, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +# SPDX-License-Identifier: curl +# +########################################################################### +# +import json +import logging +import os +import re +import shutil +import subprocess +from datetime import timedelta, datetime +from typing import List, Optional, Dict +from urllib.parse import urlparse + +from .env import Env + + +log = logging.getLogger(__name__) + + +class ExecResult: + + def __init__(self, args: List[str], exit_code: int, + stdout: List[str], stderr: List[str], + duration: Optional[timedelta] = None, + with_stats: bool = False): + self._args = args + self._exit_code = exit_code + self._stdout = stdout + self._stderr = stderr + self._duration = duration if duration is not None else timedelta() + self._response = None + self._responses = [] + self._results = {} + self._assets = [] + self._stats = [] + self._json_out = None + self._with_stats = with_stats + if with_stats: + self._parse_stats() + else: + # noinspection PyBroadException + try: + out = ''.join(self._stdout) + self._json_out = json.loads(out) + except: + pass + + def __repr__(self): + return f"ExecResult[code={self.exit_code}, args={self._args}, stdout={self._stdout}, stderr={self._stderr}]" + + def _parse_stats(self): + self._stats = [] + for l in self._stdout: + try: + self._stats.append(json.loads(l)) + except: + log.error(f'not a JSON stat: {l}') + log.error(f'stdout is: {"".join(self._stdout)}') + break + + @property + def exit_code(self) -> int: + return self._exit_code + + @property + def args(self) -> List[str]: + return self._args + + @property + def outraw(self) -> bytes: + return ''.join(self._stdout).encode() + + @property + def stdout(self) -> str: + return ''.join(self._stdout) + + @property + def json(self) -> Optional[Dict]: + """Output as JSON dictionary or None if not parseable.""" + return self._json_out + + @property + def stderr(self) -> str: + return ''.join(self._stderr) + + @property + def duration(self) -> timedelta: + return self._duration + 
+ @property + def response(self) -> Optional[Dict]: + return self._response + + @property + def responses(self) -> List[Dict]: + return self._responses + + @property + def results(self) -> Dict: + return self._results + + @property + def assets(self) -> List: + return self._assets + + @property + def with_stats(self) -> bool: + return self._with_stats + + @property + def stats(self) -> List: + return self._stats + + @property + def total_connects(self) -> Optional[int]: + if len(self.stats): + n = 0 + for stat in self.stats: + n += stat['num_connects'] + return n + return None + + def add_response(self, resp: Dict): + self._response = resp + self._responses.append(resp) + + def add_results(self, results: Dict): + self._results.update(results) + if 'response' in results: + self.add_response(results['response']) + + def add_assets(self, assets: List): + self._assets.extend(assets) + + def check_responses(self, count: int, exp_status: Optional[int] = None): + if len(self.responses) != count: + seen_queries = [] + for idx, resp in enumerate(self.responses): + assert resp['status'] == 200, f'response #{idx} status: {resp["status"]}' + if 'rquery' not in resp['header']: + log.error(f'response #{idx} missing "rquery": {resp["header"]}') + seen_queries.append(int(resp['header']['rquery'])) + for i in range(0, count-1): + if i not in seen_queries: + log.error(f'response for query {i} missing') + if self.with_stats and len(self.stats) == count: + log.error(f'got all {count} stats, though') + assert len(self.responses) == count, \ + f'response count: expected {count}, got {len(self.responses)}' + if exp_status is not None: + for idx, x in enumerate(self.responses): + assert x['status'] == exp_status, \ + f'response #{idx} unexpectedstatus: {x["status"]}' + if self.with_stats: + assert len(self.stats) == count, f'{self}' + + +class CurlClient: + + ALPN_ARG = { + 'http/0.9': '--http0.9', + 'http/1.0': '--http1.0', + 'http/1.1': '--http1.1', + 'h2': '--http2', + 'h2c': 
'--http2', + 'h3': '--http3', + } + + def __init__(self, env: Env, run_dir: Optional[str] = None): + self.env = env + self._curl = os.environ['CURL'] if 'CURL' in os.environ else env.curl + self._run_dir = run_dir if run_dir else os.path.join(env.gen_dir, 'curl') + self._stdoutfile = f'{self._run_dir}/curl.stdout' + self._stderrfile = f'{self._run_dir}/curl.stderr' + self._headerfile = f'{self._run_dir}/curl.headers' + self._tracefile = f'{self._run_dir}/curl.trace' + self._log_path = f'{self._run_dir}/curl.log' + self._rmrf(self._run_dir) + self._mkpath(self._run_dir) + + def _rmf(self, path): + if os.path.exists(path): + return os.remove(path) + + def _rmrf(self, path): + if os.path.exists(path): + return shutil.rmtree(path) + + def _mkpath(self, path): + if not os.path.exists(path): + return os.makedirs(path) + + def http_get(self, url: str, extra_args: Optional[List[str]] = None): + return self._raw(url, options=extra_args, with_stats=False) + + def http_download(self, urls: List[str], + alpn_proto: Optional[str] = None, + with_stats: bool = True, + extra_args: List[str] = None): + if extra_args is None: + extra_args = [] + extra_args.extend([ + '-o', 'download.data', + ]) + if with_stats: + extra_args.extend([ + '-w', '%{json}\\n' + ]) + return self._raw(urls, alpn_proto=alpn_proto, options=extra_args, + with_stats=with_stats) + + def _run(self, args, intext='', with_stats: bool = False): + self._rmf(self._stdoutfile) + self._rmf(self._stderrfile) + self._rmf(self._headerfile) + self._rmf(self._tracefile) + start = datetime.now() + with open(self._stdoutfile, 'w') as cout: + with open(self._stderrfile, 'w') as cerr: + p = subprocess.run(args, stderr=cerr, stdout=cout, + cwd=self._run_dir, shell=False, + input=intext.encode() if intext else None) + coutput = open(self._stdoutfile).readlines() + cerrput = open(self._stderrfile).readlines() + return ExecResult(args=args, exit_code=p.returncode, + stdout=coutput, stderr=cerrput, + duration=datetime.now() - start, 
+                          with_stats=with_stats)
+
+    def _raw(self, urls, timeout=10, options=None, insecure=False,
+             alpn_proto: Optional[str] = None,
+             force_resolve=True, with_stats=False):
+        args = self._complete_args(
+            urls=urls, timeout=timeout, options=options, insecure=insecure,
+            alpn_proto=alpn_proto, force_resolve=force_resolve)
+        r = self._run(args, with_stats=with_stats)
+        if r.exit_code == 0:
+            self._parse_headerfile(self._headerfile, r=r)
+            if r.json:
+                r.response["json"] = r.json
+        return r
+
+    def _complete_args(self, urls, timeout=None, options=None,
+                       insecure=False, force_resolve=True,
+                       alpn_proto: Optional[str] = None):
+        if not isinstance(urls, list):
+            urls = [urls]
+
+        args = [
+            self._curl, "-s", "--path-as-is", "-D", self._headerfile,
+        ]
+        if self.env.verbose > 2:
+            args.extend(['--trace', self._tracefile])
+
+        for url in urls:
+            u = urlparse(url)
+            if alpn_proto is not None:
+                if alpn_proto not in self.ALPN_ARG:
+                    raise Exception(f'unknown ALPN protocol: "{alpn_proto}"')
+                args.append(self.ALPN_ARG[alpn_proto])
+
+            if u.scheme == 'http':
+                pass
+            elif insecure:
+                args.append('--insecure')
+            elif options and "--cacert" in options:
+                pass
+            elif u.hostname:
+                args.extend(["--cacert", self.env.ca.cert_file])
+
+            if force_resolve and u.hostname and u.hostname != 'localhost' \
+                    and not re.match(r'^(\d+|\[|:).*', u.hostname):
+                port = u.port if u.port else 443
+                args.extend(["--resolve", f"{u.hostname}:{port}:127.0.0.1"])
+            if timeout is not None and int(timeout) > 0:
+                args.extend(["--connect-timeout", str(int(timeout))])
+            if options:
+                args.extend(options)
+            args.append(url)
+        return args
+
+    def _parse_headerfile(self, headerfile: str, r: ExecResult = None) -> ExecResult:
+        lines = open(headerfile).readlines()
+        if r is None:
+            r = ExecResult(args=[], exit_code=0, stdout=[], stderr=[])
+
+        response = None
+
+        def fin_response(resp):
+            if resp:
+                r.add_response(resp)
+
+        expected = ['status']
+        for line in lines:
+            line = line.strip()
+            if re.match(r'^$', line):
+                if 'trailer' in expected:
+                    # end of trailers
+                    fin_response(response)
+                    response = None
+                    expected = ['status']
+                elif 'header' in expected:
+                    # end of header, another status or trailers might follow
+                    expected = ['status', 'trailer']
+                else:
+                    assert False, f"unexpected line: '{line}'"
+                continue
+            if 'status' in expected:
+                # log.debug("reading 1st response line: %s", line)
+                m = re.match(r'^(\S+) (\d+)( .*)?$', line)
+                if m:
+                    fin_response(response)
+                    response = {
+                        "protocol": m.group(1),
+                        "status": int(m.group(2)),
+                        "description": m.group(3),
+                        "header": {},
+                        "trailer": {},
+                        "body": r.outraw
+                    }
+                    expected = ['header']
+                    continue
+            if 'trailer' in expected:
+                m = re.match(r'^([^:]+):\s*(.*)$', line)
+                if m:
+                    response['trailer'][m.group(1).lower()] = m.group(2)
+                    continue
+            if 'header' in expected:
+                m = re.match(r'^([^:]+):\s*(.*)$', line)
+                if m:
+                    response['header'][m.group(1).lower()] = m.group(2)
+                    continue
+            assert False, f"unexpected line: '{line}, expected: {expected}'"
+
+        fin_response(response)
+        return r
+
diff --git a/tests/tests-httpd/testenv/env.py b/tests/tests-httpd/testenv/env.py
new file mode 100644
index 00000000000000..a5f46cdcb99289
--- /dev/null
+++ b/tests/tests-httpd/testenv/env.py
@@ -0,0 +1,243 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#***************************************************************************
+#                                  _   _ ____  _
+#  Project                     ___| | | |  _ \| |
+#                             / __| | | | |_) | |
+#                            | (__| |_| |  _ <| |___
+#                             \___|\___/|_| \_\_____|
+#
+# Copyright (C) 2008 - 2022, Daniel Stenberg, <daniel@haxx.se>, et al.
+#
+# This software is licensed as described in the file COPYING, which
+# you should have received as part of this distribution. The terms
+# are also available at https://curl.se/docs/copyright.html.
+#
+# You may opt to use, copy, modify, merge, publish, distribute and/or sell
+# copies of the Software, and permit persons to whom the Software is
+# furnished to do so, under the terms of the COPYING file.
+# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +# SPDX-License-Identifier: curl +# +########################################################################### +# +import logging +import os +import re +import subprocess +from configparser import ConfigParser, ExtendedInterpolation +from typing import Optional + +from .certs import CertificateSpec, TestCA, Credentials + +log = logging.getLogger(__name__) + + +def init_config_from(conf_path): + if os.path.isfile(conf_path): + config = ConfigParser(interpolation=ExtendedInterpolation()) + config.read(conf_path) + return config + return None + + +TESTS_HTTPD_PATH = os.path.dirname(os.path.dirname(__file__)) +DEF_CONFIG = init_config_from(os.path.join(TESTS_HTTPD_PATH, 'config.ini')) + +TOP_PATH = os.path.dirname(os.path.dirname(TESTS_HTTPD_PATH)) +CURL = os.path.join(TOP_PATH, 'src/curl') + + +class EnvConfig: + + def __init__(self): + self.tests_dir = TESTS_HTTPD_PATH + self.gen_dir = os.path.join(self.tests_dir, 'gen') + self.config = DEF_CONFIG + # check cur and its features + self.curl = CURL + self.curl_features = [] + self.curl_protos = [] + p = subprocess.run(args=[self.curl, '-V'], + capture_output=True, text=True) + if p.returncode != 0: + assert False, f'{self.curl} -V failed with exit code: {p.returncode}' + for l in p.stdout.splitlines(keepends=False): + if l.startswith('Features: '): + self.curl_features = [feat.lower() for feat in l[10:].split(' ')] + if l.startswith('Protocols: '): + self.curl_protos = [prot.lower() for prot in l[11:].split(' ')] + self.nghttpx_with_h3 = re.match(r'.* nghttp3/.*', p.stdout.strip()) + log.error(f'nghttpx -v: {p.stdout}') + + self.http_port = self.config['test']['http_port'] + self.https_port = self.config['test']['https_port'] + self.h3_port = self.config['test']['h3_port'] + self.httpd = self.config['httpd']['httpd'] + self.apachectl = self.config['httpd']['apachectl'] + self.apxs = 
self.config['httpd']['apxs'] + if len(self.apxs) == 0: + self.apxs = None + + self.examples_pem = { + 'key': 'xxx', + 'cert': 'xxx', + } + self.htdocs_dir = os.path.join(self.gen_dir, 'htdocs') + self.tld = 'tests-httpd.curl.se' + self.domain1 = f"one.{self.tld}" + self.domain2 = f"two.{self.tld}" + self.cert_specs = [ + CertificateSpec(domains=[self.domain1], key_type='rsa2048'), + CertificateSpec(domains=[self.domain2], key_type='rsa2048'), + CertificateSpec(name="clientsX", sub_specs=[ + CertificateSpec(name="user1", client=True), + ]), + ] + + self.nghttpx = self.config['nghttpx']['nghttpx'] + self.nghttpx_with_h3 = False + if len(self.nghttpx) == 0: + self.nghttpx = 'nghttpx' + if self.nghttpx is not None: + p = subprocess.run(args=[self.nghttpx, '-v'], + capture_output=True, text=True) + if p.returncode != 0: + # not a working nghttpx + self.nghttpx = None + else: + self.nghttpx_with_h3 = re.match(r'.* nghttp3/.*', p.stdout.strip()) is not None + log.error(f'nghttpx -v: {p.stdout}') + + def is_complete(self) -> bool: + return os.path.isfile(self.httpd) and \ + os.path.isfile(self.apachectl) and \ + self.apxs is not None and \ + os.path.isfile(self.apxs) + + def get_incomplete_reason(self) -> Optional[str]: + if not os.path.isfile(self.httpd): + return f'httpd ({self.httpd}) not found' + if not os.path.isfile(self.apachectl): + return f'apachectl ({self.apachectl}) not found' + if self.apxs is None: + return f"apxs (provided by apache2-dev) not found" + if not os.path.isfile(self.apxs): + return f"apxs ({self.apxs}) not found" + return None + + +class Env: + + CONFIG = EnvConfig() + + @staticmethod + def setup_incomplete() -> bool: + return not Env.CONFIG.is_complete() + + @staticmethod + def incomplete_reason() -> Optional[str]: + return Env.CONFIG.get_incomplete_reason() + + @staticmethod + def have_h3_server() -> bool: + return Env.CONFIG.nghttpx_with_h3 + + @staticmethod + def have_h3_curl() -> bool: + return 'http3' in Env.CONFIG.curl_features + + 
@staticmethod + def have_h3() -> bool: + return Env.have_h3_curl() and Env.have_h3_server() + + def __init__(self, pytestconfig=None): + self._verbose = pytestconfig.option.verbose \ + if pytestconfig is not None else 0 + self._ca = None + + def issue_certs(self): + if self._ca is None: + ca_dir = os.path.join(self.CONFIG.gen_dir, 'ca') + self._ca = TestCA.create_root(name=self.CONFIG.tld, + store_dir=ca_dir, + key_type="rsa2048") + self._ca.issue_certs(self.CONFIG.cert_specs) + + def setup(self): + os.makedirs(self.gen_dir, exist_ok=True) + os.makedirs(self.htdocs_dir, exist_ok=True) + self.issue_certs() + + def get_credentials(self, domain) -> Optional[Credentials]: + creds = self.ca.get_credentials_for_name(domain) + if len(creds) > 0: + return creds[0] + return None + + @property + def verbose(self) -> int: + return self._verbose + + @property + def gen_dir(self) -> str: + return self.CONFIG.gen_dir + + @property + def ca(self): + return self._ca + + @property + def htdocs_dir(self) -> str: + return self.CONFIG.htdocs_dir + + @property + def domain1(self) -> str: + return self.CONFIG.domain1 + + @property + def domain2(self) -> str: + return self.CONFIG.domain2 + + @property + def http_port(self) -> str: + return self.CONFIG.http_port + + @property + def https_port(self) -> str: + return self.CONFIG.https_port + + @property + def h3_port(self) -> str: + return self.CONFIG.h3_port + + @property + def curl(self) -> str: + return self.CONFIG.curl + + @property + def httpd(self) -> str: + return self.CONFIG.httpd + + @property + def apachectl(self) -> str: + return self.CONFIG.apachectl + + @property + def apxs(self) -> str: + return self.CONFIG.apxs + + @property + def nghttpx(self) -> Optional[str]: + return self.CONFIG.nghttpx + + def authority_for(self, domain: str, alpn_proto: Optional[str] = None): + if alpn_proto is None or \ + alpn_proto in ['h2', 'http/1.1', 'http/1.0', 'http/0.9']: + return f'{domain}:{self.https_port}' + if alpn_proto in ['h3']: + return 
f'{domain}:{self.h3_port}' + return f'{domain}:{self.http_port}' diff --git a/tests/tests-httpd/testenv/httpd.py b/tests/tests-httpd/testenv/httpd.py new file mode 100644 index 00000000000000..3fb7b4a72b7147 --- /dev/null +++ b/tests/tests-httpd/testenv/httpd.py @@ -0,0 +1,288 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 2008 - 2022, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +# SPDX-License-Identifier: curl +# +########################################################################### +# +import inspect +import logging +import os +import subprocess +from datetime import timedelta, datetime +from json import JSONEncoder +import time +from typing import List + +from .curl import CurlClient, ExecResult +from .env import Env + + +log = logging.getLogger(__name__) + + +class Httpd: + + MODULES = [ + 'log_config', 'logio', 'unixd', 'version', 'watchdog', + 'authn_core', 'authz_user', 'authz_core', + 'env', 'filter', 'headers', 'mime', + 'rewrite', 'http2', 'ssl', + 'mpm_event', + ] + COMMON_MODULES_DIRS = [ + '/usr/lib/apache2/modules', # debian + '/usr/libexec/apache2/', # macos + ] + + MOD_CURLTEST = None + + def __init__(self, env: Env): + self.env = env + self._cmd = env.apachectl + self._apache_dir = os.path.join(env.gen_dir, 'apache') + self._docs_dir = os.path.join(self._apache_dir, 'docs') + self._conf_dir = os.path.join(self._apache_dir, 'conf') + self._conf_file = os.path.join(self._conf_dir, 'test.conf') + self._logs_dir = os.path.join(self._apache_dir, 'logs') + self._error_log = os.path.join(self._logs_dir, 'error_log') + self._tmp_dir = os.path.join(self._apache_dir, 'tmp') + self._mods_dir = None + if env.apxs is not None: + p = subprocess.run(args=[env.apxs, '-q', 'libexecdir'], + capture_output=True, text=True) + if p.returncode != 0: + raise Exception(f'{env.apxs} failed to query libexecdir: {p}') + self._mods_dir = p.stdout.strip() + else: + for md in self.COMMON_MODULES_DIRS: + if os.path.isdir(md): + self._mods_dir = md + if self._mods_dir is None: + raise Exception(f'apache modules dir cannot be found') + self._process = None + self._rmf(self._error_log) + self._init_curltest() + + def clear_logs(self): + self._rmf(self._error_log) + + def exists(self): + return os.path.exists(self._cmd) + + def _run(self, args, intext=''): + p = subprocess.run(args, stderr=subprocess.PIPE, stdout=subprocess.PIPE, + 
cwd=self.env.gen_dir, + input=intext.encode() if intext else None) + start = datetime.now() + return ExecResult(args=args, exit_code=p.returncode, + stdout=p.stdout.decode().splitlines(), + stderr=p.stderr.decode().splitlines(), + duration=datetime.now() - start) + + def _apachectl(self, cmd: str): + args = [self.env.apachectl, + "-d", self._apache_dir, + "-f", self._conf_file, + "-k", cmd] + return self._run(args=args) + + def start(self): + if self._process: + self.stop() + self._write_config() + with open(self._error_log, 'a') as fd: + fd.write('start of server\n') + with open(os.path.join(self._apache_dir, 'xxx'), 'a') as fd: + fd.write('start of server\n') + r = self._apachectl('start') + if r.exit_code != 0: + log.error(f'failed to start httpd: {r}') + return self.wait_live(timeout=timedelta(seconds=5)) + + def stop(self): + r = self._apachectl('stop') + if r.exit_code == 0: + return self.wait_dead(timeout=timedelta(seconds=5)) + return r.exit_code == 0 + + def restart(self): + self.stop() + return self.start() + + def reload(self): + r = self._apachectl("graceful") + if r.exit_code != 0: + log.error(f'failed to reload httpd: {r}') + return self.wait_live(timeout=timedelta(seconds=5)) + + def wait_dead(self, timeout: timedelta): + curl = CurlClient(env=self.env, run_dir=self._tmp_dir) + try_until = datetime.now() + timeout + while datetime.now() < try_until: + r = curl.http_get(url=f'http://{self.env.domain1}:{self.env.http_port}/') + if r.exit_code != 0: + return True + time.sleep(.1) + log.debug(f"Server still responding after {timeout}") + return False + + def wait_live(self, timeout: timedelta): + curl = CurlClient(env=self.env, run_dir=self._tmp_dir) + try_until = datetime.now() + timeout + while datetime.now() < try_until: + r = curl.http_get(url=f'http://{self.env.domain1}:{self.env.http_port}/') + if r.exit_code == 0: + return True + time.sleep(.1) + log.debug(f"Server still not responding after {timeout}") + return False + + def _rmf(self, path): + 
if os.path.exists(path):
+            return os.remove(path)
+
+    def _mkpath(self, path):
+        if not os.path.exists(path):
+            return os.makedirs(path)
+
+    def _write_config(self):
+        domain1 = self.env.domain1
+        creds1 = self.env.get_credentials(domain1)
+        domain2 = self.env.domain2
+        creds2 = self.env.get_credentials(domain2)
+        self._mkpath(self._conf_dir)
+        self._mkpath(self._logs_dir)
+        self._mkpath(self._tmp_dir)
+        self._mkpath(os.path.join(self._docs_dir, 'two'))
+        with open(os.path.join(self._docs_dir, 'data.json'), 'w') as fd:
+            data = {
+                'server': f'{domain1}',
+            }
+            fd.write(JSONEncoder().encode(data))
+        with open(os.path.join(self._docs_dir, 'two/data.json'), 'w') as fd:
+            data = {
+                'server': f'{domain2}',
+            }
+            fd.write(JSONEncoder().encode(data))
+        with open(self._conf_file, 'w') as fd:
+            for m in self.MODULES:
+                if os.path.exists(os.path.join(self._mods_dir, f'mod_{m}.so')):
+                    fd.write(f'LoadModule {m}_module "{self._mods_dir}/mod_{m}.so"\n')
+            if Httpd.MOD_CURLTEST is not None:
+                fd.write(f'LoadModule curltest_module \"{Httpd.MOD_CURLTEST}\"\n')
+            conf = [  # base server config
+                f'ServerRoot "{self._apache_dir}"',
+                f'DefaultRuntimeDir logs',
+                f'PidFile httpd.pid',
+                f'ErrorLog {self._error_log}',
+                f'LogLevel {self._get_log_level()}',
+                f'LogLevel http:trace4',
+                f'H2MinWorkers 16',
+                f'H2MaxWorkers 128',
+                f'Listen {self.env.http_port}',
+                f'Listen {self.env.https_port}',
+                f'TypesConfig "{self._conf_dir}/mime.types"',
+                # we want the query string in a response header, so we
+                # can check responses more easily
+                f'Header set rquery "%{{QUERY_STRING}}s"',
+            ]
+            conf.extend([  # plain http host for domain1
+                f'<VirtualHost *:{self.env.http_port}>',
+                f'    ServerName {domain1}',
+                f'    DocumentRoot "{self._docs_dir}"',
+            ])
+            conf.extend(self._curltest_conf())
+            conf.extend([
+                f'</VirtualHost>',
+                f'',
+            ])
+            conf.extend([  # https host for domain1, h1 + h2
+                f'<VirtualHost *:{self.env.https_port}>',
+                f'    ServerName {domain1}',
+                f'    Protocols h2 http/1.1',
+                f'    SSLEngine on',
+                f'    SSLCertificateFile {creds1.cert_file}',
+                f'    SSLCertificateKeyFile {creds1.pkey_file}',
+                f'    DocumentRoot "{self._docs_dir}"',
+            ])
+            conf.extend(self._curltest_conf())
+            conf.extend([
+                f'</VirtualHost>',
+                f'',
+            ])
+            conf.extend([  # https host for domain2, no h2
+                f'<VirtualHost *:{self.env.https_port}>',
+                f'    ServerName {domain2}',
+                f'    Protocols http/1.1',
+                f'    SSLEngine on',
+                f'    SSLCertificateFile {creds2.cert_file}',
+                f'    SSLCertificateKeyFile {creds2.pkey_file}',
+                f'    DocumentRoot "{self._docs_dir}/two"',
+            ])
+            conf.extend(self._curltest_conf())
+            conf.extend([
+                f'</VirtualHost>',
+                f'',
+            ])
+            fd.write("\n".join(conf))
+        with open(os.path.join(self._conf_dir, 'mime.types'), 'w') as fd:
+            fd.write("\n".join([
+                'text/html html',
+                'application/json json',
+                ''
+            ]))
+
+    def _get_log_level(self):
+        if self.env.verbose > 3:
+            return 'trace2'
+        if self.env.verbose > 2:
+            return 'trace1'
+        if self.env.verbose > 1:
+            return 'debug'
+        return 'info'
+
+    def _curltest_conf(self) -> List[str]:
+        if Httpd.MOD_CURLTEST is not None:
+            return [
+                f'    <Location /curltest/echo>',
+                f'      SetHandler curltest-echo',
+                f'    </Location>',
+                f'    <Location /curltest/tweak>',
+                f'      SetHandler curltest-tweak',
+                f'    </Location>',
+            ]
+        return []
+
+    def _init_curltest(self):
+        if Httpd.MOD_CURLTEST is not None:
+            return
+        local_dir = os.path.dirname(inspect.getfile(Httpd))
+        p = subprocess.run([self.env.apxs, '-c', 'mod_curltest.c'],
+                           capture_output=True,
+                           cwd=os.path.join(local_dir, 'mod_curltest'))
+        rv = p.returncode
+        if rv != 0:
+            log.error(f"compiling mod_curltest failed: {p.stderr}")
+            raise Exception(f"compiling mod_curltest failed: {p.stderr}")
+        Httpd.MOD_CURLTEST = os.path.join(
+            local_dir, 'mod_curltest/.libs/mod_curltest.so')
diff --git a/tests/tests-httpd/testenv/mod_curltest/.gitignore b/tests/tests-httpd/testenv/mod_curltest/.gitignore
new file mode 100644
index 00000000000000..06186631a06072
--- /dev/null
+++ b/tests/tests-httpd/testenv/mod_curltest/.gitignore
@@ -0,0 +1,5 @@
+# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
+# +# SPDX-License-Identifier: curl + +*.slo \ No newline at end of file diff --git a/tests/tests-httpd/testenv/mod_curltest/mod_curltest.c b/tests/tests-httpd/testenv/mod_curltest/mod_curltest.c new file mode 100644 index 00000000000000..10522a8d11d300 --- /dev/null +++ b/tests/tests-httpd/testenv/mod_curltest/mod_curltest.c @@ -0,0 +1,411 @@ +/*************************************************************************** + * _ _ ____ _ + * Project ___| | | | _ \| | + * / __| | | | |_) | | + * | (__| |_| | _ <| |___ + * \___|\___/|_| \_\_____| + * + * Copyright (C) Daniel Stenberg, , et al. + * + * This software is licensed as described in the file COPYING, which + * you should have received as part of this distribution. The terms + * are also available at https://curl.se/docs/copyright.html. + * + * You may opt to use, copy, modify, merge, publish, distribute and/or sell + * copies of the Software, and permit persons to whom the Software is + * furnished to do so, under the terms of the COPYING file. + * + * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY + * KIND, either express or implied. 
+ * + * SPDX-License-Identifier: curl + * + ***************************************************************************/ +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +static void curltest_hooks(apr_pool_t *pool); +static int curltest_echo_handler(request_rec *r); +static int curltest_tweak_handler(request_rec *r); + +AP_DECLARE_MODULE(curltest) = { + STANDARD20_MODULE_STUFF, + NULL, /* func to create per dir config */ + NULL, /* func to merge per dir config */ + NULL, /* func to create per server config */ + NULL, /* func to merge per server config */ + NULL, /* command handlers */ + curltest_hooks, +#if defined(AP_MODULE_FLAG_NONE) + AP_MODULE_FLAG_ALWAYS_MERGE +#endif +}; + +static int curltest_post_config(apr_pool_t *p, apr_pool_t *plog, + apr_pool_t *ptemp, server_rec *s) +{ + void *data = NULL; + const char *key = "mod_curltest_init_counter"; + + (void)plog;(void)ptemp; + + apr_pool_userdata_get(&data, key, s->process->pool); + if(!data) { + /* dry run */ + apr_pool_userdata_set((const void *)1, key, + apr_pool_cleanup_null, s->process->pool); + return APR_SUCCESS; + } + + /* mess with the overall server here */ + + return APR_SUCCESS; +} + +static void curltest_hooks(apr_pool_t *pool) +{ + ap_log_perror(APLOG_MARK, APLOG_TRACE1, 0, pool, "installing hooks"); + + /* Run once after configuration is set, but before mpm children initialize. 
+ */ + ap_hook_post_config(curltest_post_config, NULL, NULL, APR_HOOK_MIDDLE); + + /* curl test handlers */ + ap_hook_handler(curltest_echo_handler, NULL, NULL, APR_HOOK_MIDDLE); + ap_hook_handler(curltest_tweak_handler, NULL, NULL, APR_HOOK_MIDDLE); +} + +#define SECS_PER_HOUR (60*60) +#define SECS_PER_DAY (24*SECS_PER_HOUR) + +static apr_status_t duration_parse(apr_interval_time_t *ptimeout, const char *value, + const char *def_unit) +{ + char *endp; + apr_int64_t n; + + n = apr_strtoi64(value, &endp, 10); + if(errno) { + return errno; + } + if(!endp || !*endp) { + if (!def_unit) def_unit = "s"; + } + else if(endp == value) { + return APR_EINVAL; + } + else { + def_unit = endp; + } + + switch(*def_unit) { + case 'D': + case 'd': + *ptimeout = apr_time_from_sec(n * SECS_PER_DAY); + break; + case 's': + case 'S': + *ptimeout = (apr_interval_time_t) apr_time_from_sec(n); + break; + case 'h': + case 'H': + /* Time is in hours */ + *ptimeout = (apr_interval_time_t) apr_time_from_sec(n * SECS_PER_HOUR); + break; + case 'm': + case 'M': + switch(*(++def_unit)) { + /* Time is in milliseconds */ + case 's': + case 'S': + *ptimeout = (apr_interval_time_t) n * 1000; + break; + /* Time is in minutes */ + case 'i': + case 'I': + *ptimeout = (apr_interval_time_t) apr_time_from_sec(n * 60); + break; + default: + return APR_EGENERAL; + } + break; + case 'u': + case 'U': + switch(*(++def_unit)) { + /* Time is in microseconds */ + case 's': + case 'S': + *ptimeout = (apr_interval_time_t) n; + break; + default: + return APR_EGENERAL; + } + break; + default: + return APR_EGENERAL; + } + return APR_SUCCESS; +} + +static int status_from_str(const char *s, apr_status_t *pstatus) +{ + if(!strcmp("timeout", s)) { + *pstatus = APR_TIMEUP; + return 1; + } + else if(!strcmp("reset", s)) { + *pstatus = APR_ECONNRESET; + return 1; + } + return 0; +} + +static int curltest_echo_handler(request_rec *r) +{ + conn_rec *c = r->connection; + apr_bucket_brigade *bb; + apr_bucket *b; + apr_status_t 
rv; + char buffer[8192]; + const char *ct; + long l; + + if(strcmp(r->handler, "curltest-echo")) { + return DECLINED; + } + if(r->method_number != M_GET && r->method_number != M_POST) { + return DECLINED; + } + + ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "echo_handler: processing"); + r->status = 200; + r->clength = -1; + r->chunked = 1; + apr_table_unset(r->headers_out, "Content-Length"); + /* Discourage content-encodings */ + apr_table_unset(r->headers_out, "Content-Encoding"); + apr_table_setn(r->subprocess_env, "no-brotli", "1"); + apr_table_setn(r->subprocess_env, "no-gzip", "1"); + + ct = apr_table_get(r->headers_in, "content-type"); + ap_set_content_type(r, ct? ct : "application/octet-stream"); + + bb = apr_brigade_create(r->pool, c->bucket_alloc); + /* copy any request body into the response */ + if((rv = ap_setup_client_block(r, REQUEST_CHUNKED_DECHUNK))) goto cleanup; + if(ap_should_client_block(r)) { + while(0 < (l = ap_get_client_block(r, &buffer[0], sizeof(buffer)))) { + ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, + "echo_handler: copying %ld bytes from request body", l); + rv = apr_brigade_write(bb, NULL, NULL, buffer, l); + if (APR_SUCCESS != rv) goto cleanup; + rv = ap_pass_brigade(r->output_filters, bb); + if (APR_SUCCESS != rv) goto cleanup; + ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, + "echo_handler: passed %ld bytes from request body", l); + } + } + /* we are done */ + b = apr_bucket_eos_create(c->bucket_alloc); + APR_BRIGADE_INSERT_TAIL(bb, b); + ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "echo_handler: request read"); + + if(r->trailers_in && !apr_is_empty_table(r->trailers_in)) { + ap_log_rerror(APLOG_MARK, APLOG_TRACE2, 0, r, + "echo_handler: seeing incoming trailers"); + apr_table_setn(r->trailers_out, "h2test-trailers-in", + apr_itoa(r->pool, 1)); + } + + rv = ap_pass_brigade(r->output_filters, bb); + +cleanup: + if(rv == APR_SUCCESS + || r->status != HTTP_OK + || c->aborted) { + ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, 
"echo_handler: done"); + return OK; + } + else { + /* no way to know what type of error occurred */ + ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "echo_handler failed"); + return AP_FILTER_ERROR; + } + return DECLINED; +} + +static int curltest_tweak_handler(request_rec *r) +{ + conn_rec *c = r->connection; + apr_bucket_brigade *bb; + apr_bucket *b; + apr_status_t rv; + char buffer[16*1024]; + int i, chunks = 3, error_bucket = 1; + size_t chunk_size = sizeof(buffer); + const char *request_id = "none"; + apr_time_t delay = 0, chunk_delay = 0; + apr_array_header_t *args = NULL; + int http_status = 200; + apr_status_t error = APR_SUCCESS, body_error = APR_SUCCESS; + + if(strcmp(r->handler, "curltest-tweak")) { + return DECLINED; + } + if(r->method_number != M_GET && r->method_number != M_POST) { + return DECLINED; + } + + if(r->args) { + args = apr_cstr_split(r->args, "&", 1, r->pool); + for(i = 0; i < args->nelts; ++i) { + char *s, *val, *arg = APR_ARRAY_IDX(args, i, char*); + s = strchr(arg, '='); + if(s) { + *s = '\0'; + val = s + 1; + if(!strcmp("status", arg)) { + http_status = (int)apr_atoi64(val); + if(http_status > 0) { + continue; + } + } + else if(!strcmp("chunks", arg)) { + chunks = (int)apr_atoi64(val); + if(chunks >= 0) { + continue; + } + } + else if(!strcmp("chunk_size", arg)) { + chunk_size = (int)apr_atoi64(val); + if(chunk_size >= 0) { + if(chunk_size > sizeof(buffer)) { + ap_log_rerror(APLOG_MARK, APLOG_ERR, 0, r, + "chunk_size %zu too large", chunk_size); + ap_die(HTTP_BAD_REQUEST, r); + return OK; + } + continue; + } + } + else if(!strcmp("id", arg)) { + /* just an id for repeated requests with curl's url globbing */ + request_id = val; + continue; + } + else if(!strcmp("error", arg)) { + if(status_from_str(val, &error)) { + continue; + } + } + else if(!strcmp("error_bucket", arg)) { + error_bucket = (int)apr_atoi64(val); + if(error_bucket >= 0) { + continue; + } + } + else if(!strcmp("body_error", arg)) { + if(status_from_str(val, 
&body_error)) { + continue; + } + } + else if(!strcmp("delay", arg)) { + rv = duration_parse(&delay, val, "s"); + if(APR_SUCCESS == rv) { + continue; + } + } + else if(!strcmp("chunk_delay", arg)) { + rv = duration_parse(&chunk_delay, val, "s"); + if(APR_SUCCESS == rv) { + continue; + } + } + } + ap_log_rerror(APLOG_MARK, APLOG_ERR, 0, r, "query parameter not " + "understood: '%s' in %s", + arg, r->args); + ap_die(HTTP_BAD_REQUEST, r); + return OK; + } + } + + ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "error_handler: processing " + "request, %s", r->args? r->args : "(no args)"); + r->status = http_status; + r->clength = -1; + r->chunked = 1; + apr_table_setn(r->headers_out, "request-id", request_id); + apr_table_unset(r->headers_out, "Content-Length"); + /* Discourage content-encodings */ + apr_table_unset(r->headers_out, "Content-Encoding"); + apr_table_setn(r->subprocess_env, "no-brotli", "1"); + apr_table_setn(r->subprocess_env, "no-gzip", "1"); + + ap_set_content_type(r, "application/octet-stream"); + bb = apr_brigade_create(r->pool, c->bucket_alloc); + + if(delay) { + apr_sleep(delay); + } + if(error != APR_SUCCESS) { + return ap_map_http_request_error(error, HTTP_BAD_REQUEST); + } + /* flush response */ + b = apr_bucket_flush_create(c->bucket_alloc); + APR_BRIGADE_INSERT_TAIL(bb, b); + rv = ap_pass_brigade(r->output_filters, bb); + if (APR_SUCCESS != rv) goto cleanup; + + memset(buffer, 'X', sizeof(buffer)); + for(i = 0; i < chunks; ++i) { + if(chunk_delay) { + apr_sleep(chunk_delay); + } + rv = apr_brigade_write(bb, NULL, NULL, buffer, chunk_size); + if(APR_SUCCESS != rv) goto cleanup; + rv = ap_pass_brigade(r->output_filters, bb); + if(APR_SUCCESS != rv) goto cleanup; + ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, + "error_handler: passed %lu bytes as response body", + (unsigned long)chunk_size); + if(body_error != APR_SUCCESS) { + rv = body_error; + goto cleanup; + } + } + /* we are done */ + b = apr_bucket_eos_create(c->bucket_alloc); + 
APR_BRIGADE_INSERT_TAIL(bb, b); + rv = ap_pass_brigade(r->output_filters, bb); + apr_brigade_cleanup(bb); + ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, + "error_handler: response passed"); + +cleanup: + ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, + "error_handler: request cleanup, r->status=%d, aborted=%d", + r->status, c->aborted); + if(rv == APR_SUCCESS) { + return OK; + } + if(error_bucket && 0) { + http_status = ap_map_http_request_error(rv, HTTP_BAD_REQUEST); + b = ap_bucket_error_create(http_status, NULL, r->pool, c->bucket_alloc); + ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, + "error_handler: passing error bucket, status=%d", + http_status); + APR_BRIGADE_INSERT_TAIL(bb, b); + ap_pass_brigade(r->output_filters, bb); + } + return AP_FILTER_ERROR; +} diff --git a/tests/tests-httpd/testenv/nghttpx.py b/tests/tests-httpd/testenv/nghttpx.py new file mode 100644 index 00000000000000..99ccecf1ef2b59 --- /dev/null +++ b/tests/tests-httpd/testenv/nghttpx.py @@ -0,0 +1,125 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 2008 - 2022, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import datetime
import logging
import os
import signal
import subprocess
import time
from typing import Optional

from .env import Env


log = logging.getLogger(__name__)


class Nghttpx:
    """Manage a nghttpx process acting as HTTP/3 (QUIC) frontend that
    proxies to the test httpd's https/http backends."""

    def __init__(self, env: Env):
        self.env = env
        self._cmd = env.nghttpx
        self._pid_file = os.path.join(env.gen_dir, 'nghttpx.pid')
        self._conf_file = os.path.join(env.gen_dir, 'nghttpx.conf')
        self._error_log = os.path.join(env.gen_dir, 'nghttpx.log')
        self._stderr = os.path.join(env.gen_dir, 'nghttpx.stderr')
        # FIX: `self._process = None` was assigned twice; one annotated
        # assignment suffices.
        self._process: Optional[subprocess.Popen] = None

    def exists(self):
        """True when the configured nghttpx binary is present."""
        return os.path.exists(self._cmd)

    def clear_logs(self):
        """Remove error log and stderr capture from earlier runs."""
        self._rmf(self._error_log)
        self._rmf(self._stderr)

    def is_running(self):
        """True while the started process has not exited."""
        if self._process:
            self._process.poll()
            return self._process.returncode is None
        return False

    def start(self):
        """(Re)start nghttpx with a QUIC frontend on env.h3_port.

        The last two args are nghttpx's positional private key and
        certificate files. Returns True when the process was spawned.
        """
        if self._process:
            self.stop()
        self._write_config()
        args = [
            self._cmd,
            f'--frontend=*,{self.env.h3_port};quic',
            f'--backend=127.0.0.1,{self.env.https_port};{self.env.domain1};sni={self.env.domain1};proto=h2;tls',
            f'--backend=127.0.0.1,{self.env.http_port}',
            f'--log-level=INFO',
            f'--pid-file={self._pid_file}',
            f'--errorlog-file={self._error_log}',
            f'--conf={self._conf_file}',
            f'--cacert={self.env.ca.cert_file}',
            self.env.get_credentials(self.env.domain1).pkey_file,
            self.env.get_credentials(self.env.domain1).cert_file,
        ]
        ngerr = open(self._stderr, 'a')
        self._process = subprocess.Popen(args=args, stderr=ngerr)
        return self._process.returncode is None

    def stop(self):
        """Terminate the process, waiting up to 2 seconds for exit.

        NOTE(review): subprocess.TimeoutExpired propagates if the process
        does not exit in time -- confirm callers expect that.
        """
        if self._process:
            self._process.terminate()
            self._process.wait(timeout=2)
            self._process = None
        return True

    def restart(self):
        """Stop and start again; returns start()'s result."""
        self.stop()
        return self.start()

    def reload(self, timeout: datetime.timedelta):
        """Gracefully replace the running instance.

        Sends SIGQUIT to the old process, starts a new one, then waits up
        to `timeout` for the old one to exit. Returns True on success.
        """
        if self._process:
            running = self._process
            os.kill(running.pid, signal.SIGQUIT)
            self.start()
            try:
                log.debug(f'waiting for nghttpx({running.pid}) to exit.')
                running.wait(timeout=timeout.seconds)
                log.debug(f'nghttpx({running.pid}) terminated -> {running.returncode}')
                return True
            except subprocess.TimeoutExpired:
                log.error(f'SIGQUIT nghttpx({running.pid}), but did not shut down.')
        return False

    def _rmf(self, path):
        """Remove a file if it exists."""
        if os.path.exists(path):
            return os.remove(path)

    def _mkpath(self, path):
        """Create a directory (and parents) if missing."""
        if not os.path.exists(path):
            return os.makedirs(path)

    def _write_config(self):
        """Write the nghttpx config file (currently only comments)."""
        with open(self._conf_file, 'w') as fd:
            # FIX: the two writes were glued onto one line (no newline
            # between them) and the first call carried a stray trailing
            # comma; write proper comment lines instead.
            fd.write("\n".join([
                '# nghttpx test config',
                '# do we need something here?',
                ''
            ]))