Skip to content
This repository has been archived by the owner on Apr 21, 2023. It is now read-only.


Browse files Browse the repository at this point in the history
rewrite-options: don't turn on CoreFilters just because of query params
Fixes an nginx-only bug: apache/incubator-pagespeed-ngx#1190

ngx_pagespeed side of the change:
  • Loading branch information
jeffkaufman authored and crowell committed Aug 4, 2016
1 parent bf817f7 commit 2a3b127
Show file tree
Hide file tree
Showing 3 changed files with 92 additions and 0 deletions.
8 changes: 8 additions & 0 deletions install/debug.conf.template
Expand Up @@ -1428,6 +1428,14 @@ ModPagespeedMessagesDomains Allow localhost
ModPagespeedGlobalAdminDomains Allow

<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
DocumentRoot "@@APACHE_DOC_ROOT@@"
ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
ModPagespeedRewriteLevel PassThrough
ModPagespeedEnableFilters debug

#STATS_LOGGING ModPagespeedStatistics on
#STATS_LOGGING ModPagespeedStatisticsLogging on
Expand Down
73 changes: 73 additions & 0 deletions pagespeed/automatic/
Expand Up @@ -785,3 +785,76 @@ function kill_port {
kill -9 $PID

# Kills the process listening on a port if the name matches the first argument.
# usage:
#   kill_listener_port program_name port
function kill_listener_port() {
  local cmdline="$1"
  local port="$2"
  # -t: print PIDs only; -i: match the TCP port; -s: only LISTEN state;
  # -a: AND the conditions together; -c: restrict to the given command name.
  # '|| true' keeps the helper from failing when nothing is listening.
  kill -9 $(lsof -t -i "TCP:${port}" -s TCP:LISTEN -a -c "/^${cmdline}$/") || true
}

# Performs timed reads on the output from a command passed via $1. The stream
# will be interpreted as a chunked http encoding. Each chunk will be allowed
# at most threshold_sec ($2) seconds to be read or the function will fail. When
# the stream is fully read, the function will compare the total number of http
# chunks read with expect_chunk_count ($3) and fail on mismatch.
# Usage:
#  check_flushing "curl -N --raw --silent --proxy $SECONDARY_HOSTNAME $URL" 5 1
# This will check if the curl command resulted in single chunk which was read
# within one second or less.
function check_flushing() {
  local command="$1"
  local threshold_sec="$2"
  local expect_chunk_count="$3"
  local chunk_count=0
  local line=""

  echo "Command: $command"

  if [ "${USE_VALGRIND:-}" = true ]; then
    # We can't say much about correctness of timings under valgrind, so relax
    # the test for that.
    threshold_sec=$(echo "scale=2; $threshold_sec*10" | bc)
  fi

  while true; do
    # Read the http chunk size from the stream. This is also the read which
    # checks timings: it must complete within threshold_sec or 'check' fails.
    check read -t $threshold_sec line
    echo "Chunk number [$chunk_count] has size: $line"
    line=$(echo $line | tr -d '\n' | tr -d '\r')
    # A chunk size of 0 marks the end of the chunked stream: verify we saw at
    # least the expected number of chunks and stop.
    if [ $((16#$line)) -eq "0" ] ; then
      check [ $expect_chunk_count -le $chunk_count ]
      return
    fi
    let chunk_count=chunk_count+1
    # Read the actual data from the stream, using the amount indicated in
    # the previous read. This read should be fast.
    # Note that we need to clear IFS for read since otherwise it can get
    # confused by whitespace-only chunks.
    IFS= check read -N $((16#$line)) line
    echo "Chunk data: $line"
    # Read the trailing \r\n - should be fast.
    check read -N 2 line
  done < <($command)
}

# Given the output of a page with ?PageSpeedFilters=+debug, print the section of
# the page where it lists what filters are enabled.
function extract_filters_from_debug_html() {
  local debug_output="$1"

  # Pull out the non-blank lines between "Filters:" and "Options:". First
  # convert newlines to % so sed can operate on the whole file, then put them
  # back again.
  check_from -q "$debug_output" grep -q "^Filters:$"
  check_from -q "$debug_output" grep -q "^Options:$"
  echo "$debug_output" | tr '\n' '%' | sed 's~.*%Filters:%~~' \
    | sed "s~%Options:.*~~" | tr '%' '\n'
}
11 changes: 11 additions & 0 deletions pagespeed/system/
Expand Up @@ -589,6 +589,17 @@ if [ "$SECONDARY_HOSTNAME" != "" ]; then
check [ `grep -c '<style>[.]blue{[^}]*}[.]bold{[^}]*}</style>' \

start_test query params dont turn on core filters
# See https://github.com/apache/incubator-pagespeed-ngx/issues/1190
FILTERS=$(extract_filters_from_debug_html "$OUT")
check_from "$FILTERS" grep -q "^db.*Debug$"
check_from "$FILTERS" grep -q "^hw.*Flushes html$"
check_not_from "$FILTERS" grep -q "^jm.*Rewrite External Javascript$"
check_not_from "$FILTERS" grep -q "^jj.*Rewrite Inline Javascript$"

start_test OptimizeForBandwidth
# We use blocking-rewrite tests because we want to make sure we don't
# get rewritten URLs when we don't want them.
Expand Down

0 comments on commit 2a3b127

Please sign in to comment.