From d40e25cd45ed9c6b2b66e6b97573a0413e4c23bd Mon Sep 17 00:00:00 2001
From: Paul Holzinger
Date: Wed, 17 May 2023 15:39:49 +0200
Subject: [PATCH] fix panic in Writer

Commit 766cfece introduced this bug by defining an incorrect split
function. First, it breaks the old behavior because it no longer splits
at newlines. Second, it causes a panic because it never tells the
scanner to stop. See the bufio.ScanLines function; something like:

```
if atEOF && len(data) == 0 {
	return 0, nil, nil
}
```

is needed to do that.

This commit fixes it by restoring the old behavior and calling
bufio.ScanLines, while keeping the 64KB check in place to avoid
buffering for too long. Two tests are added to ensure it works as
expected.

Fixes #1383

Signed-off-by: Paul Holzinger
---
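For illustration, here is a minimal, self-contained sketch (not part of
the patch) of the fixed split function driven by a plain bufio.Scanner;
the sample input and the print loop are only for demonstration:

```
package main

import (
	"bufio"
	"fmt"
	"strings"
)

func main() {
	// Same shape as the fixed split function in writer.go: emit a 64KB
	// chunk when enough data is buffered, otherwise defer to
	// bufio.ScanLines, which handles the "atEOF && len(data) == 0"
	// case and tells the scanner to stop.
	chunkSize := bufio.MaxScanTokenSize // 64KB
	splitFunc := func(data []byte, atEOF bool) (int, []byte, error) {
		if len(data) >= chunkSize {
			return chunkSize, data[:chunkSize], nil
		}
		return bufio.ScanLines(data, atEOF)
	}

	scanner := bufio.NewScanner(strings.NewReader("bar\nfoo\n"))
	scanner.Buffer(make([]byte, bufio.MaxScanTokenSize), bufio.MaxScanTokenSize)
	scanner.Split(splitFunc)

	// Prints "bar" and "foo", then stops cleanly at EOF.
	for scanner.Scan() {
		fmt.Println(scanner.Text())
	}
	if err := scanner.Err(); err != nil {
		fmt.Println("scan error:", err)
	}
}
```

With the old split function the same loop keeps receiving empty tokens
at EOF and the scanner eventually panics; deferring to bufio.ScanLines
makes it terminate.
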
 writer.go      |  8 +++----
 writer_test.go | 64 ++++++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 68 insertions(+), 4 deletions(-)

diff --git a/writer.go b/writer.go
index 7e7703c72..074fd4b8b 100644
--- a/writer.go
+++ b/writer.go
@@ -70,16 +70,16 @@ func (entry *Entry) writerScanner(reader *io.PipeReader, printFunc func(args ...
 	scanner.Buffer(make([]byte, bufio.MaxScanTokenSize), bufio.MaxScanTokenSize)
 
 	// Define a split function to split the input into chunks of up to 64KB
-	chunkSize := 64 * 1024 // 64KB
+	chunkSize := bufio.MaxScanTokenSize // 64KB
 	splitFunc := func(data []byte, atEOF bool) (int, []byte, error) {
-		if len(data) > chunkSize {
+		if len(data) >= chunkSize {
 			return chunkSize, data[:chunkSize], nil
 		}
 
-		return len(data), data, nil
+		return bufio.ScanLines(data, atEOF)
 	}
 
-	//Use the custom split function to split the input
+	// Use the custom split function to split the input
 	scanner.Split(splitFunc)
 
 	// Scan the input and write it to the logger using the specified print function
diff --git a/writer_test.go b/writer_test.go
index 5c34927da..5b6261bd1 100644
--- a/writer_test.go
+++ b/writer_test.go
@@ -1,10 +1,16 @@
 package logrus_test
 
 import (
+	"bufio"
+	"bytes"
 	"log"
 	"net/http"
+	"strings"
+	"testing"
+	"time"
 
 	"github.com/sirupsen/logrus"
+	"github.com/stretchr/testify/assert"
 )
 
 func ExampleLogger_Writer_httpServer() {
@@ -32,3 +38,61 @@ func ExampleLogger_Writer_stdlib() {
 	// Not logrus imported under the name `log`.
 	log.SetOutput(logger.Writer())
 }
+
+func TestWriterSplitNewlines(t *testing.T) {
+	buf := bytes.NewBuffer(nil)
+	logger := logrus.New()
+	logger.Formatter = &logrus.TextFormatter{
+		DisableColors:    true,
+		DisableTimestamp: true,
+	}
+	logger.SetOutput(buf)
+	writer := logger.Writer()
+
+	const logNum = 10
+
+	for i := 0; i < logNum; i++ {
+		_, err := writer.Write([]byte("bar\nfoo\n"))
+		assert.NoError(t, err, "writer.Write failed")
+	}
+	writer.Close()
+	// The writer writes in another goroutine, so wait a bit to make
+	// sure all writes are done before checking the output.
+	time.Sleep(500 * time.Millisecond)
+
+	lines := strings.Split(strings.TrimRight(buf.String(), "\n"), "\n")
+	assert.Len(t, lines, logNum*2, "logger printed incorrect number of lines")
+}
+
+func TestWriterSplitsMax64KB(t *testing.T) {
+	buf := bytes.NewBuffer(nil)
+	logger := logrus.New()
+	logger.Formatter = &logrus.TextFormatter{
+		DisableColors:    true,
+		DisableTimestamp: true,
+	}
+	logger.SetOutput(buf)
+	writer := logger.Writer()
+
+	// write more than 64KB
+	const bigWriteLen = bufio.MaxScanTokenSize + 100
+	output := make([]byte, bigWriteLen)
+	// let's not write zero bytes
+	for i := 0; i < bigWriteLen; i++ {
+		output[i] = 'A'
+	}
+
+	for i := 0; i < 3; i++ {
+		len, err := writer.Write(output)
+		assert.NoError(t, err, "writer.Write failed")
+		assert.Equal(t, bigWriteLen, len, "bytes written")
+	}
+	writer.Close()
+	// The writer writes in another goroutine, so wait a bit to make
+	// sure all writes are done before checking the output.
+	time.Sleep(500 * time.Millisecond)
+
+	lines := strings.Split(strings.TrimRight(buf.String(), "\n"), "\n")
+	// we should have 4 lines because we wrote more than 64KB each time
+	assert.Len(t, lines, 4, "logger printed incorrect number of lines")
+}