86 changes: 85 additions & 1 deletion pkg/buffer/buffer.go
@@ -3,10 +3,15 @@ package buffer
import (
"bufio"
"fmt"
"io"
"net/http"
"strings"
)

// maxLineSize is the maximum size for a single log line (10MB).
// GitHub Actions logs can contain extremely long lines (base64 content, minified JS, etc.)
const maxLineSize = 10 * 1024 * 1024

// ProcessResponseAsRingBufferToEnd reads the body of an HTTP response line by line,
// storing only the last maxJobLogLines lines using a ring buffer (sliding window).
// This efficiently retains the most recent lines, overwriting older ones as needed.
@@ -25,6 +30,7 @@ import (
//
// The function uses a ring buffer to efficiently store only the last maxJobLogLines lines.
// If the response contains more lines than maxJobLogLines, only the most recent lines are kept.
// Lines exceeding maxLineSize are truncated with a marker.
func ProcessResponseAsRingBufferToEnd(httpResp *http.Response, maxJobLogLines int) (string, int, *http.Response, error) {
if maxJobLogLines > 100000 {
maxJobLogLines = 100000
@@ -36,7 +42,8 @@ func ProcessResponseAsRingBufferToEnd(httpResp *http.Response, maxJobLogLines in
writeIndex := 0

scanner := bufio.NewScanner(httpResp.Body)
scanner.Buffer(make([]byte, 0, 64*1024), 1024*1024)
// Set initial buffer to 64KB and max token size to 10MB to handle very long lines
scanner.Buffer(make([]byte, 0, 64*1024), maxLineSize)

for scanner.Scan() {
line := scanner.Text()
@@ -48,6 +55,11 @@ func ProcessResponseAsRingBufferToEnd(httpResp *http.Response, maxJobLogLines in
}

if err := scanner.Err(); err != nil {
// If we hit a token too long error, fall back to byte-by-byte reading
// with line truncation to handle extremely long lines gracefully
if err == bufio.ErrTooLong {
return processWithLongLineHandling(httpResp.Body, lines, validLines, totalLines, writeIndex, maxJobLogLines)
}
return "", 0, httpResp, fmt.Errorf("failed to read log content: %w", err)
}

@@ -71,3 +83,75 @@ func ProcessResponseAsRingBufferToEnd(httpResp *http.Response, maxJobLogLines in

return strings.Join(result, "\n"), totalLines, httpResp, nil
}

// processWithLongLineHandling continues processing after encountering a line
// that exceeds the scanner's max token size. It reads byte-by-byte and
// truncates extremely long lines instead of failing.
func processWithLongLineHandling(body io.Reader, lines []string, validLines []bool, totalLines, writeIndex, maxJobLogLines int) (string, int, *http.Response, error) {
// Add a marker that we encountered truncated content
truncatedMarker := "[LINE TRUNCATED - exceeded maximum line length of 10MB]"
lines[writeIndex] = truncatedMarker
validLines[writeIndex] = true
totalLines++
writeIndex = (writeIndex + 1) % maxJobLogLines

// Continue reading with a buffered reader, truncating long lines
reader := bufio.NewReader(body)
var currentLine strings.Builder
const maxDisplayLength = 1000 // Keep first 1000 chars of truncated lines

for {
b, err := reader.ReadByte()
if err == io.EOF {
// Handle final line without newline
if currentLine.Len() > 0 {
line := currentLine.String()
if len(line) > maxLineSize {
line = line[:maxDisplayLength] + "... [TRUNCATED]"
}
lines[writeIndex] = line
validLines[writeIndex] = true
totalLines++
}
break
}
if err != nil {
return "", 0, nil, fmt.Errorf("failed to read log content: %w", err)
}

if b == '\n' {
line := currentLine.String()
if len(line) > maxLineSize {
line = line[:maxDisplayLength] + "... [TRUNCATED]"
}
lines[writeIndex] = line
validLines[writeIndex] = true
totalLines++
writeIndex = (writeIndex + 1) % maxJobLogLines
currentLine.Reset()
} else if currentLine.Len() < maxLineSize+maxDisplayLength {
// Stop accumulating bytes once we exceed the limit (plus buffer for truncation message)
currentLine.WriteByte(b)
}
}

var result []string
linesInBuffer := totalLines
if linesInBuffer > maxJobLogLines {
linesInBuffer = maxJobLogLines
}

startIndex := 0
if totalLines > maxJobLogLines {
startIndex = writeIndex
}

for i := 0; i < linesInBuffer; i++ {
idx := (startIndex + i) % maxJobLogLines
if validLines[idx] {
result = append(result, lines[idx])
}
}

return strings.Join(result, "\n"), totalLines, nil, nil
Copilot AI commented on Jan 26, 2026:
When the fallback function is called, it has already lost the httpResp reference but returns nil for the *http.Response parameter. The caller expects to receive the httpResp back (as seen in the function signature and normal return path), but the fallback path returns nil. This inconsistency could cause nil pointer dereferences in calling code that expects a non-nil response. Either return httpResp consistently or document why nil is acceptable in this error recovery path.
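
Until the helper returns httpResp consistently, callers can guard on their own side. A minimal sketch under assumptions — the import path, URL, and fetchJobLogTail are hypothetical and not code from this PR; only ProcessResponseAsRingBufferToEnd and its signature come from the diff above:

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/example/mcp-server/pkg/buffer" // hypothetical import path
)

// fetchJobLogTail is an illustrative caller that does not rely on the returned
// *http.Response being non-nil: it keeps its own reference to resp and closes
// the body itself, so the fallback path returning nil is harmless here.
func fetchJobLogTail(url string, maxLines int) (string, int, error) {
	resp, err := http.Get(url)
	if err != nil {
		return "", 0, err
	}
	defer resp.Body.Close()

	tail, total, respOut, err := buffer.ProcessResponseAsRingBufferToEnd(resp, maxLines)
	if err != nil {
		return "", 0, fmt.Errorf("reading job log: %w", err)
	}
	// Guard: fall back to the original response if the helper returned nil.
	if respOut == nil {
		respOut = resp
	}
	fmt.Printf("status %s, kept tail of %d total lines\n", respOut.Status, total)
	return tail, total, nil
}

func main() {
	if _, _, err := fetchJobLogTail("https://example.invalid/job/123/logs", 500); err != nil {
		fmt.Println("error:", err)
	}
}
```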
}
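
As an aside for readers tracing the ring-buffer bookkeeping above (writeIndex, totalLines, startIndex), here is a standalone toy sketch of the same sliding-window technique; it is illustrative only and not part of the PR:

```go
package main

import (
	"fmt"
	"strings"
)

// ringTail keeps only the last max lines of input, mirroring the sliding-window
// approach used by ProcessResponseAsRingBufferToEnd (toy version, max > 0).
func ringTail(input string, max int) (string, int) {
	lines := make([]string, max)
	total, write := 0, 0
	for _, line := range strings.Split(strings.TrimRight(input, "\n"), "\n") {
		lines[write] = line
		total++
		write = (write + 1) % max
	}
	// If the buffer wrapped, the oldest retained line sits at write.
	start, count := 0, total
	if total > max {
		start, count = write, max
	}
	out := make([]string, 0, count)
	for i := 0; i < count; i++ {
		out = append(out, lines[(start+i)%max])
	}
	return strings.Join(out, "\n"), total
}

func main() {
	tail, total := ringTail("line1\nline2\nline3\nline4\nline5\n", 3)
	fmt.Printf("total=%d tail=%q\n", total, tail) // total=5 tail="line3\nline4\nline5"
}
```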
79 changes: 79 additions & 0 deletions pkg/buffer/buffer_test.go
@@ -0,0 +1,79 @@
package buffer

import (
"io"
"net/http"
"strings"
"testing"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

func TestProcessResponseAsRingBufferToEnd(t *testing.T) {
t.Run("normal lines", func(t *testing.T) {
body := "line1\nline2\nline3\n"
resp := &http.Response{
Body: io.NopCloser(strings.NewReader(body)),
}

result, totalLines, respOut, err := ProcessResponseAsRingBufferToEnd(resp, 10)
if respOut != nil && respOut.Body != nil {
defer respOut.Body.Close()
}
require.NoError(t, err)
assert.Equal(t, 3, totalLines)
assert.Equal(t, "line1\nline2\nline3", result)
})

t.Run("ring buffer keeps last N lines", func(t *testing.T) {
body := "line1\nline2\nline3\nline4\nline5\n"
resp := &http.Response{
Body: io.NopCloser(strings.NewReader(body)),
}

result, totalLines, respOut, err := ProcessResponseAsRingBufferToEnd(resp, 3)
if respOut != nil && respOut.Body != nil {
defer respOut.Body.Close()
}
require.NoError(t, err)
assert.Equal(t, 5, totalLines)
assert.Equal(t, "line3\nline4\nline5", result)
})

t.Run("handles very long line exceeding 10MB", func(t *testing.T) {
// Create a line that exceeds maxLineSize (10MB)
longLine := strings.Repeat("x", 11*1024*1024) // 11MB
body := "line1\n" + longLine + "\nline3\n"
resp := &http.Response{
Body: io.NopCloser(strings.NewReader(body)),
}

result, totalLines, respOut, err := ProcessResponseAsRingBufferToEnd(resp, 100)
if respOut != nil && respOut.Body != nil {
defer respOut.Body.Close()
}
require.NoError(t, err)
// Should have processed lines with truncation marker
assert.Greater(t, totalLines, 0)
assert.Contains(t, result, "TRUNCATED")
})

t.Run("handles line at exactly max size", func(t *testing.T) {
// Create a line just under maxLineSize
longLine := strings.Repeat("a", 1024*1024) // 1MB - should work fine
body := "start\n" + longLine + "\nend\n"
resp := &http.Response{
Body: io.NopCloser(strings.NewReader(body)),
}

result, totalLines, respOut, err := ProcessResponseAsRingBufferToEnd(resp, 100)
if respOut != nil && respOut.Body != nil {
defer respOut.Body.Close()
}
require.NoError(t, err)
assert.Equal(t, 3, totalLines)
assert.Contains(t, result, "start")
assert.Contains(t, result, "end")
})
}