9 changes: 9 additions & 0 deletions src/site/CMakeLists.txt
@@ -54,6 +54,15 @@ configure_file( ${CMAKE_CURRENT_SOURCE_DIR}/markdown/download.md.in
configure_file( ${CMAKE_CURRENT_SOURCE_DIR}/markdown/development/build-cmake.md.in
    ${CMAKE_CURRENT_BINARY_DIR}/markdown/development/build-cmake.md )

configure_file( ${CMAKE_CURRENT_SOURCE_DIR}/echarts.min.js
    ${CMAKE_CURRENT_BINARY_DIR}/html/echarts.min.js
    COPYONLY
)
configure_file( ${CMAKE_CURRENT_SOURCE_DIR}/generate_appending_a_log_message.js
    ${CMAKE_CURRENT_BINARY_DIR}/html/generate_appending_a_log_message.js
    COPYONLY
)

add_custom_target( doc_doxygen ALL
    COMMAND ${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile
    WORKING_DIRECTORY ${LOG4CXX_SOURCE_DIR}
1 change: 1 addition & 0 deletions src/site/echarts.min.js

Large diffs are not rendered by default.

98 changes: 98 additions & 0 deletions src/site/generate_appending_a_log_message.js
@@ -0,0 +1,98 @@

// Get the DOM container for the plot
var containerDOM = document.getElementById('appending_a_log_message_plot');
if (!containerDOM) {
    throw new Error("Could not find 'appending_a_log_message_plot' element");
}
var myChart = echarts.init(containerDOM, null, { renderer: 'canvas' });


// Find the benchmark html table
var benchmark_data = null;
var element = document.getElementById('benchmark_data_marker');
while (element && element.tagName) {
    if (element.tagName === 'TABLE') {
        benchmark_data = element;
        break;
    }
    element = element.nextElementSibling;
}
if (!benchmark_data) {
    throw new Error("Could not find benchmark data");
}

// Identify the benchmark tests to be included on the plot
var benchmark_pattern = [];
benchmark_pattern.push(new RegExp("Appending (.*) using ([A-Za-z]+), pattern: \\%m\\%n$"));
benchmark_pattern.push(new RegExp("Async, Sending (.*) using ([A-Za-z <]+)$"));
const value_regex_pattern = new RegExp("([0-9]+) ns");

// Extract the data
Contributor:
some thoughts:

  1. instead of extracting the data from the table, we can have a JSON file generated from the benchmark application that can easily be sucked into the JS code
  2. if we generate the JSON file, should we just generate the table on the fly? this could be done with a simple python script during the build process if we want to keep the data static.

Contributor Author:
It is simpler (zero maintenance) to use the data in the page to generate the graph.
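
For illustration only, a minimal sketch of the JSON-based alternative the reviewer describes; the file name benchmark_results.json and its labels/series shape are assumptions (no such file is generated by this PR, which reads the rendered table instead):

// Illustration only: populate the chart from a hypothetical benchmark_results.json
// of the form { "labels": [...], "series": [ { "name": "...", "values": [...] } ] }
// instead of scraping the HTML table.
fetch('benchmark_results.json')
    .then(response => response.json())
    .then(results => {
        var chart = echarts.init(document.getElementById('appending_a_log_message_plot'), null, { renderer: 'canvas' });
        chart.setOption({
            title: { text: 'Appending a log message' },
            xAxis: { name: 'Log message content', data: results.labels },
            yAxis: { name: 'Average elapsed time (ns)' },
            legend: { data: results.series.map(s => s.name) },
            series: results.series.map(s => ({ name: s.name, type: 'line', data: s.values }))
        });
    });

The trade-off is an extra generated artifact that must stay in sync with the table, which is the zero-maintenance point made above.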

var plot_data = new Map();
var xAxisLabels = [];
for (const row of benchmark_data.rows) {
    const columns = row.cells;
    if (2 < columns.length) {
        const value_match = value_regex_pattern.exec(columns[1].innerText);
        if (value_match && 1 < value_match.length) {
            for (const pattern of benchmark_pattern) {
                const benchmark_match = pattern.exec(columns[0].innerText);
                if (benchmark_match && 2 < benchmark_match.length) {
                    if (!xAxisLabels.includes(benchmark_match[1])) {
                        xAxisLabels.push(benchmark_match[1]);
                    }
                    var keyValueMap = plot_data.get(benchmark_match[2]);
                    if (!keyValueMap) {
                        keyValueMap = new Map();
                        plot_data.set(benchmark_match[2], keyValueMap);
                    }
                    keyValueMap.set(benchmark_match[1], value_match[1]);
                }
            }
        }
    }
}

// Generate a series for each legend
var legend_data = [];
var series_data = [];
for (const [key, keyValueMap] of plot_data.entries()) {
    legend_data.push(key);
    var series_values = [];
    for (const label of xAxisLabels) {
        var value = keyValueMap.get(label);
        series_values.push(value ? parseInt(value) : null);
    }
    var series_data_item = {
        name: key,
        type: 'line',
        data: series_values
    };
    series_data.push(series_data_item);
}

// Configure the chart
var chart_data = {
    title: { text: 'Appending a log message' },
    yAxis: {
        name: 'Average elapsed time (ns)',
        nameLocation: 'center'
    },
    legend: {
        orient: 'vertical',
        left: 150,
        top: 'center',
        data: legend_data
    },
    xAxis: {
        axisTick: { alignWithLabel: true },
        axisLabel: { rotate: 30 },
        name: 'Log message content',
        nameLocation: 'center',
        data: xAxisLabels
    },
    series: series_data
};

// Display the chart
myChart.setOption(chart_data);
26 changes: 19 additions & 7 deletions src/site/markdown/performance.md
@@ -84,7 +84,9 @@ The "Iterations" column derivation is explained in [Google Benchmark documentation]
L2 Unified 256 KiB (x4)
L3 Unified 6144 KiB (x1)
Load Average: 0.07, 0.03, 0.01

@htmlonly
<div id="benchmark_data_marker"></div>
@endhtmlonly
| Benchmark | Time | CPU | Iterations |
| --------- | -------: | --: | ---------: |
| Testing disabled logging request | 0.472 ns | 0.472 ns | 1000000000 |
@@ -123,20 +125,30 @@ The "Iterations" column derivation is explained in [Google Benchmark documentation]
-# The "Async" benchmarks test [AsyncAppender](@ref log4cxx::AsyncAppender) throughput, with logging events discarded in the background thread.
-# The "Logging" benchmarks write to a file using buffered output. Overhead is 2-3 times more when not using buffered output.

The above table shows that the overhead of an enabled logging request
varies greatly with the message content.
A single operations-per-second number is not meaningful.
Most importantly note that [using buffered output](@ref log4cxx::FileAppender::setOption)
reduces overhead more than any other detail.
@htmlonly
<div id="appending_a_log_message_plot" style="width: 800px;height:400px;"></div>
<script src="echarts.min.js"></script>
<script src="generate_appending_a_log_message.js"></script>
@endhtmlonly

The above graph shows that the overhead of an enabled logging request
varies greatly with the message content and that
the `LOG4CXX_[level]_FMT` macros have lower overhead than the stream-based (`MessageBuffer`) equivalents.
It also shows two data points where binary-to-text conversion
is moved to a background thread
using [AsyncBuffer](@ref log4cxx::helpers::AsyncBuffer) and [AsyncAppender](@ref log4cxx::AsyncAppender).

Note that logging from multiple threads concurrently
to a common appender generally does not increase throughput
due to lock contention in the [doAppend method](@ref log4cxx::AppenderSkeleton::doAppend).
To simplify the work of an appender implementor,
the [doAppend method](@ref log4cxx::AppenderSkeleton::doAppend) currently prevents multiple threads
from concurrently entering [the append method](@ref log4cxx::AppenderSkeleton::append),
which is the method a concrete appender class must implement.

Note also that [using buffered output](@ref log4cxx::FileAppender::setOption)
reduces overhead more than any other detail.

The [AsyncAppender](@ref log4cxx::AsyncAppender) provides the least overhead
when logging concurrently from multiple threads
as it overrides the [doAppend method](@ref log4cxx::AsyncAppender::doAppend)
15 changes: 15 additions & 0 deletions src/site/test_echarts.html
Contributor:
test code kept intentionally?

Contributor Author (@swebb2066, Nov 28, 2025):
I found a test was necessary to quickly iterate through the vast landscape of echarts options. I am agnostic on whether it should be in the repository.

@@ -0,0 +1,15 @@
<!DOCTYPE html>
<html>
<head>
    <meta charset="utf-8" />
</head>
<body>
    <h1>Test page for iterating through echarts format options</h1>
    <div id="appending_a_log_message_plot" style="width: 800px;height:400px;"></div>
    <script src="echarts.min.js"></script>
    <script src="test_echarts.js"></script>
</body>
<footer>

</footer>
</html>
36 changes: 36 additions & 0 deletions src/site/test_echarts.js
@@ -0,0 +1,36 @@
var chart_data = {
    "title": { "text": "Appending a log message" },
    "legend": {
        "orient": "vertical",
        "left": 150,
        "top": "center",
        "data": [ "MessageBuffer", "FMT", "FMT and AsyncBuffer", "operator<< and AsyncBuffer" ]
    },
    "xAxis": {
        axisTick: {
            alignWithLabel: true
        },
        axisLabel: {
            rotate: 30
        },
        name : 'Log message content',
        nameLocation : 'center',
        data : [ "5 char string", "49 char string", "int value", "int+float", "int+10float" ]
    },
    "yAxis": {
        name : 'Average elapsed time (ns)',
        nameLocation : 'center'
    },
    "series": [
        { "type": "line", "name": "MessageBuffer", "data": [ 334, 370, 509, 911, 4579 ] },
        { "type": "line", "name": "FMT", "data": [ null, 346, 376, 508, 1671 ] },
        { "type": "line", "name": "FMT and AsyncBuffer", "data": [ null, null, null, null, 784 ] },
        { "type": "line", "name": "operator<< and AsyncBuffer", "data": [ null, null, null, null, 1211 ] }
    ]
};

var containerDOM = document.getElementById('appending_a_log_message_plot');
if (containerDOM) {
    var myChart = echarts.init(containerDOM, null, { renderer: 'canvas' });
    myChart.setOption(chart_data);
}