Skip to content

Commit

Permalink
Merge pull request #11840 from azat/fix-http-memory-accounting
Browse files Browse the repository at this point in the history
Fix memory accounting via HTTP interface

(cherry picked from commit b447508)
  • Loading branch information
alexey-milovidov authored and alesapin committed Jul 10, 2020
1 parent 7290f26 commit 595c0cb
Show file tree
Hide file tree
Showing 5 changed files with 34 additions and 11 deletions.
12 changes: 7 additions & 5 deletions programs/server/HTTPHandler.cpp
Expand Up @@ -220,15 +220,12 @@ HTTPHandler::HTTPHandler(IServer & server_, const std::string & name)


void HTTPHandler::processQuery(
Context & context,
Poco::Net::HTTPServerRequest & request,
HTMLForm & params,
Poco::Net::HTTPServerResponse & response,
Output & used_output)
{
Context context = server.context();

CurrentThread::QueryScope query_scope(context);

LOG_TRACE(log, "Request URI: " << request.getURI());

std::istream & istr = request.stream();
Expand Down Expand Up @@ -667,6 +664,11 @@ void HTTPHandler::handleRequest(Poco::Net::HTTPServerRequest & request, Poco::Ne
setThreadName("HTTPHandler");
ThreadStatus thread_status;

/// Should be initialized before anything else,
/// for correct memory accounting.
Context context = server.context();
CurrentThread::QueryScope query_scope(context);

Output used_output;

/// In case of exception, send stack trace to client.
Expand All @@ -690,7 +692,7 @@ void HTTPHandler::handleRequest(Poco::Net::HTTPServerRequest & request, Poco::Ne
throw Exception("The Transfer-Encoding is not chunked and there is no Content-Length header for POST request", ErrorCodes::HTTP_LENGTH_REQUIRED);
}

processQuery(request, params, response, used_output);
processQuery(context, request, params, response, used_output);
LOG_INFO(log, "Done processing query");
}
catch (...)
Expand Down
1 change: 1 addition & 0 deletions programs/server/HTTPHandler.h
Expand Up @@ -71,6 +71,7 @@ class HTTPHandler : public Poco::Net::HTTPRequestHandler

/// Also initializes 'used_output'.
void processQuery(
Context & context,
Poco::Net::HTTPServerRequest & request,
HTMLForm & params,
Poco::Net::HTTPServerResponse & response,
Expand Down
12 changes: 6 additions & 6 deletions tests/queries/0_stateless/01091_num_threads.sql
@@ -1,44 +1,44 @@
set log_queries=1;
set log_query_threads=1;

SELECT 1;
WITH 01091 AS id SELECT 1;
SYSTEM FLUSH LOGS;

WITH
(
SELECT query_id
FROM system.query_log
WHERE (query = 'SELECT 1') AND (event_date >= (today() - 1))
WHERE (query = 'WITH 01091 AS id SELECT 1;\n') AND (event_date >= (today() - 1))
ORDER BY event_time DESC
LIMIT 1
) AS id
SELECT uniqExact(thread_id)
FROM system.query_thread_log
WHERE (event_date >= (today() - 1)) AND (query_id = id) AND (thread_id != master_thread_id);

select sum(number) from numbers(1000000);
with 01091 as id select sum(number) from numbers(1000000);
SYSTEM FLUSH LOGS;

WITH
(
SELECT query_id
FROM system.query_log
WHERE (query = 'SELECT sum(number) FROM numbers(1000000)') AND (event_date >= (today() - 1))
WHERE (query LIKE 'with 01091 as id select sum(number) from numbers(1000000);%') AND (event_date >= (today() - 1))
ORDER BY event_time DESC
LIMIT 1
) AS id
SELECT uniqExact(thread_id)
FROM system.query_thread_log
WHERE (event_date >= (today() - 1)) AND (query_id = id) AND (thread_id != master_thread_id);

select sum(number) from numbers_mt(1000000);
with 01091 as id select sum(number) from numbers_mt(1000000);
SYSTEM FLUSH LOGS;

WITH
(
SELECT query_id
FROM system.query_log
WHERE (query = 'SELECT sum(number) FROM numbers_mt(1000000)') AND (event_date >= (today() - 1))
WHERE (query LIKE 'with 01091 as id select sum(number) from numbers_mt(1000000);%') AND (event_date >= (today() - 1))
ORDER BY event_time DESC
LIMIT 1
) AS id
Expand Down
@@ -0,0 +1 @@
1000
19 changes: 19 additions & 0 deletions tests/queries/0_stateless/01238_http_memory_tracking.sh
@@ -0,0 +1,19 @@
#!/usr/bin/env bash

CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# Quote the path so the test still works from a checkout dir containing spaces.
. "$CURDIR/../shell_config.sh"

set -o pipefail

# Keep at least one running query for the user alive for the duration of the
# test. (1k HTTP queries take ~1 second; sleep for 5x more to avoid flakiness.)
${CLICKHOUSE_CLIENT} --format Null -n <<<'SELECT sleepEachRow(1) FROM numbers(5)' &

# Fire 1000 HTTP queries under a 1 GiB per-user memory limit; each query
# should succeed and print "1", so grep -c at the end reports 1000.
# The "yes: standard output: Broken pipe" message from `yes` is expected
# once `head` closes the pipe, hence 2>/dev/null.
yes 'SELECT 1' 2>/dev/null | {
    head -n 1000
} | {
    # -I {} substitutes each input line as the POST body
    # (the portable spelling of the deprecated GNU `xargs -i`).
    xargs -I {} ${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&wait_end_of_query=1&max_memory_usage_for_user=$((1<<30))" -d '{}'
} | grep -x -c 1

# Wait for the background sleep query to finish before the test exits.
wait

0 comments on commit 595c0cb

Please sign in to comment.