From 004f47502ca074bd2591e215512fd2e18bd8659a Mon Sep 17 00:00:00 2001 From: Shulhan Date: Sat, 16 Sep 2023 23:18:02 +0700 Subject: bufio: realign struct Scanner This reduces the Scanner allocation size from 112 to 72 bytes. --- src/bufio/scan.go | 4 ++-- src/bufio/scan_test.go | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) (limited to 'src') diff --git a/src/bufio/scan.go b/src/bufio/scan.go index a26b2ff17d..ce4c1721d4 100644 --- a/src/bufio/scan.go +++ b/src/bufio/scan.go @@ -29,12 +29,12 @@ import ( type Scanner struct { r io.Reader // The reader provided by the client. split SplitFunc // The function to split the tokens. - maxTokenSize int // Maximum size of a token; modified by tests. + err error // Sticky error. token []byte // Last token returned by split. buf []byte // Buffer used as argument to split. + maxTokenSize int // Maximum size of a token; modified by tests. start int // First non-processed byte in buf. end int // End of data in buf. - err error // Sticky error. empties int // Count of successive empty tokens. scanCalled bool // Scan has been called; buffer is in use. done bool // Scan has finished. diff --git a/src/bufio/scan_test.go b/src/bufio/scan_test.go index 6b64f7ba9c..bfb148f261 100644 --- a/src/bufio/scan_test.go +++ b/src/bufio/scan_test.go @@ -136,8 +136,8 @@ func TestScanWords(t *testing.T) { // slowReader is a reader that returns only a few bytes at a time, to test the incremental // reads in Scanner.Scan. 
type slowReader struct { - max int buf io.Reader + max int } func (sr *slowReader) Read(p []byte) (n int, err error) { @@ -188,7 +188,7 @@ func TestScanLongLines(t *testing.T) { buf.Write(tmp.Bytes()) lineNum++ } - s := NewScanner(&slowReader{1, buf}) + s := NewScanner(&slowReader{max: 1, buf: buf}) s.Split(ScanLines) s.MaxTokenSize(smallMaxTokenSize) j = 0 @@ -224,7 +224,7 @@ func TestScanLineTooLong(t *testing.T) { buf.Write(tmp.Bytes()) lineNum++ } - s := NewScanner(&slowReader{3, buf}) + s := NewScanner(&slowReader{max: 3, buf: buf}) s.Split(ScanLines) s.MaxTokenSize(smallMaxTokenSize) j = 0 @@ -249,7 +249,7 @@ func TestScanLineTooLong(t *testing.T) { // Test that the line splitter handles a final line without a newline. func testNoNewline(text string, lines []string, t *testing.T) { buf := strings.NewReader(text) - s := NewScanner(&slowReader{7, buf}) + s := NewScanner(&slowReader{max: 7, buf: buf}) s.Split(ScanLines) for lineNum := 0; s.Scan(); lineNum++ { line := lines[lineNum] @@ -325,7 +325,7 @@ func TestSplitError(t *testing.T) { // Read the data. const text = "abcdefghijklmnopqrstuvwxyz" buf := strings.NewReader(text) - s := NewScanner(&slowReader{1, buf}) + s := NewScanner(&slowReader{max: 1, buf: buf}) s.Split(errorSplit) var i int for i = 0; s.Scan(); i++ { -- cgit v1.3