#!/bin/sh
#
# Load Yaws with a number of concurrent curl requests for files of varying
# size up to the fd limit to make sure the sendfile driver is not serializing
# requests and creating a request queue so long that it eats up all the available
# fds. The file sizes are passed into this script as command-line arguments.
#
sizes="$@"
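# Guard: the expr divisions below use $# as the divisor, so at least one
# size argument is required.
if [ $# -eq 0 ]; then
    echo "usage: $0 size [size ...]" >&2
    exit 1
fi

# Decide how many fetches of each size to run concurrently: either an
# explicit override via RUNTEST_CURLS (split across the sizes), or a
# conservative fraction of this shell's open-file limit.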
if [ -n "$RUNTEST_CURLS" ]; then
fdlimit=`expr $RUNTEST_CURLS / $#`
else
fdlimit=`ulimit -n`
fdlimit=`expr $fdlimit / 8`
[ $fdlimit -gt 1024 ] && fdlimit=1024
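    # Each background curl is a separate OS process, so on Darwin the
    # per-user process limit can be hit before the fd limit. Try to raise
    # it, and skip the test (exit 0) if that isn't possible.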
    if [ `uname -s` = Darwin ]; then
        proclimit=`ulimit -u`
        if [ $proclimit -lt $fdlimit ]; then
            newproclimit=`expr $fdlimit \* 3`
            ulimit -u $newproclimit
            if [ $? -ne 0 ]; then
                echo unable to run enough processes for this test, skipping
                echo please raise your max process limit via '"'sysctl -w kern.maxprocperuid=$newproclimit'"'
                exit 0
            fi
        fi
    fi
fi
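# Remove any stale crash dump from a previous run, then set up a per-run
# scratch directory (removed again on exit or interrupt) for the downloaded
# files and response headers.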
rm -f erl_crash.dump
outdir=./runtest_out$$
trap "rm -rf $outdir" HUP INT EXIT
mkdir -p $outdir
i=0
numcurls=`expr $fdlimit '*' $#`
echo starting $numcurls background curl tasks
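# Spawn fdlimit curls per size. Each job's response headers are captured
# with curl -D, and a symlink named after the job's PID points at that
# header file so a failing job can be diagnosed by PID later on.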
while [ $i -lt $fdlimit ]; do
    for j in $sizes; do
        curlhdrs=curl-${j}-${i}.txt
        curl -D $outdir/$curlhdrs --connect-timeout 60 -s \
            -o $outdir/${j}-${i}.dat http://localhost:8000/$j.dat >/dev/null 2>&1 &
        p=$!
        ( cd $outdir ; rm -f ${p}.txt ; ln -s $curlhdrs ${p}.txt )
        curls="$curls $p"
    done
    i=`expr $i + 1`
done
echo waiting for curl tasks to complete
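# Reap every job that is still around (kill -0 only probes for existence).
# A nonzero exit status from any curl fails the whole test; on success the
# job's header file and PID symlink are cleaned up as we go.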
for job in $curls; do
    if kill -0 $job 2>/dev/null; then
        wait $job
        result=$?
        if [ $result -ne 0 ]; then
            echo failure in background curl job $job, exit status $result
            cat $outdir/${job}.txt
            sleep 5
            [ -f erl_crash.dump ] && echo 'Yaws crashed! See logs/report.log and erl_crash.dump for details'
            exit 1
        else
            ( cd $outdir ; rm -f `readlink ${job}.txt` ${job}.txt )
        fi
    fi
done
echo verifying all retrieved files
# On some systems (e.g. MacBook Pro) it seems to take a bit for the
# file writes to actually appear. The sleep and ls below help the loop
# avoid failing due to files that just haven't appeared yet.
sleep 5
ls $outdir >/dev/null 2>&1
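# Two checks per size: every expected file must exist, and each must be
# exactly the advertised number of bytes (column 5 of ls -l).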
for j in $sizes; do
    count=`ls -1 $outdir/${j}*.dat | wc -l`
    if [ $count -ne $fdlimit ]; then
        echo not all $j byte data files retrieved: expected $fdlimit, got $count
        exit 1
    fi
    badsizes=`ls -l $outdir/${j}*.dat | awk '{print $5}' | grep -vx $j`
    if [ -n "$badsizes" ]; then
        echo not all $j byte data files have the right size
        exit 1
    fi
done
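# Even if every transfer succeeded, a crash dump means the Yaws node went
# down at some point during the run.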
if [ -f erl_crash.dump ]; then
    echo 'Yaws crashed! See logs/report.log and erl_crash.dump for details'
    exit 1
fi
exit 0