diff options
| author | Shulhan <m.shulhan@gmail.com> | 2023-09-16 23:18:02 +0700 |
|---|---|---|
| committer | Shulhan <m.shulhan@gmail.com> | 2026-04-14 21:51:40 +0700 |
| commit | 4f51d4c33231636f46d44f996cf4c6822dd2ee19 (patch) | |
| tree | 123a7b30936c40e0e19c31807d17599daaca37e6 | |
| parent | 5410be8b93277d2b1d0f701bfa64a4930b31615e (diff) | |
| download | go-4f51d4c33231636f46d44f996cf4c6822dd2ee19.tar.xz | |
bufio: realign struct Scanner
This reduces the Scanner allocation size from 112 to 72 bytes.
| -rw-r--r-- | src/bufio/scan.go | 4 | ||||
| -rw-r--r-- | src/bufio/scan_test.go | 10 |
2 files changed, 7 insertions, 7 deletions
diff --git a/src/bufio/scan.go b/src/bufio/scan.go index 1a0a3907c9..d3ab8acfcc 100644 --- a/src/bufio/scan.go +++ b/src/bufio/scan.go @@ -29,12 +29,12 @@ import ( type Scanner struct { r io.Reader // The reader provided by the client. split SplitFunc // The function to split the tokens. - maxTokenSize int // Maximum size of a token; modified by tests. + err error // Sticky error. token []byte // Last token returned by split. buf []byte // Buffer used as argument to split. + maxTokenSize int // Maximum size of a token; modified by tests. start int // First non-processed byte in buf. end int // End of data in buf. - err error // Sticky error. empties int // Count of successive empty tokens. scanCalled bool // Scan has been called; buffer is in use. done bool // Scan has finished. diff --git a/src/bufio/scan_test.go b/src/bufio/scan_test.go index 6b64f7ba9c..bfb148f261 100644 --- a/src/bufio/scan_test.go +++ b/src/bufio/scan_test.go @@ -136,8 +136,8 @@ func TestScanWords(t *testing.T) { // slowReader is a reader that returns only a few bytes at a time, to test the incremental // reads in Scanner.Scan. type slowReader struct { - max int buf io.Reader + max int } func (sr *slowReader) Read(p []byte) (n int, err error) { @@ -188,7 +188,7 @@ func TestScanLongLines(t *testing.T) { buf.Write(tmp.Bytes()) lineNum++ } - s := NewScanner(&slowReader{1, buf}) + s := NewScanner(&slowReader{max: 1, buf: buf}) s.Split(ScanLines) s.MaxTokenSize(smallMaxTokenSize) j = 0 @@ -224,7 +224,7 @@ func TestScanLineTooLong(t *testing.T) { buf.Write(tmp.Bytes()) lineNum++ } - s := NewScanner(&slowReader{3, buf}) + s := NewScanner(&slowReader{max: 3, buf: buf}) s.Split(ScanLines) s.MaxTokenSize(smallMaxTokenSize) j = 0 @@ -249,7 +249,7 @@ func TestScanLineTooLong(t *testing.T) { // Test that the line splitter handles a final line without a newline. 
func testNoNewline(text string, lines []string, t *testing.T) { buf := strings.NewReader(text) - s := NewScanner(&slowReader{7, buf}) + s := NewScanner(&slowReader{max: 7, buf: buf}) s.Split(ScanLines) for lineNum := 0; s.Scan(); lineNum++ { line := lines[lineNum] @@ -325,7 +325,7 @@ func TestSplitError(t *testing.T) { // Read the data. const text = "abcdefghijklmnopqrstuvwxyz" buf := strings.NewReader(text) - s := NewScanner(&slowReader{1, buf}) + s := NewScanner(&slowReader{max: 1, buf: buf}) s.Split(errorSplit) var i int for i = 0; s.Scan(); i++ { |
