diff options
| author | Matthew Dempsky <mdempsky@google.com> | 2019-10-28 15:30:35 -0700 |
|---|---|---|
| committer | Matthew Dempsky <mdempsky@google.com> | 2019-11-05 00:00:43 +0000 |
| commit | ea0b4e7c7db8c5d376e77fd3e6741d94685073ac (patch) | |
| tree | 8afdc5178caea495179b453a704b6207d3dc1ad0 /src/runtime | |
| parent | e341e93c519ef22ed4759fd0b4643a30321b9222 (diff) | |
| download | go-ea0b4e7c7db8c5d376e77fd3e6741d94685073ac.tar.xz | |
cmd/compile, runtime: add comparison tracing for libFuzzer
This CL extends cmd/compile's experimental libFuzzer support with
calls to __sanitizer_cov_trace_{,const_}cmp{1,2,4,8}. This allows much
more efficient fuzzing of comparisons.
Only supports amd64 and arm64 for now.
Updates #14565.
Change-Id: Ibf82a8d9658f2bc50d955bdb1ae26723a3f0584d
Reviewed-on: https://go-review.googlesource.com/c/go/+/203887
Run-TryBot: Matthew Dempsky <mdempsky@google.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Keith Randall <khr@golang.org>
Diffstat (limited to 'src/runtime')
| -rw-r--r-- | src/runtime/libfuzzer.go | 75 |
| -rw-r--r-- | src/runtime/libfuzzer_amd64.s | 42 |
| -rw-r--r-- | src/runtime/libfuzzer_arm64.s | 31 |
3 files changed, 148 insertions, 0 deletions
diff --git a/src/runtime/libfuzzer.go b/src/runtime/libfuzzer.go new file mode 100644 index 0000000000..0161955f09 --- /dev/null +++ b/src/runtime/libfuzzer.go @@ -0,0 +1,75 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build libfuzzer + +package runtime + +import _ "unsafe" // for go:linkname + +func libfuzzerCall(fn *byte, arg0, arg1 uintptr) + +func libfuzzerTraceCmp1(arg0, arg1 uint8) { + libfuzzerCall(&__sanitizer_cov_trace_cmp1, uintptr(arg0), uintptr(arg1)) +} + +func libfuzzerTraceCmp2(arg0, arg1 uint16) { + libfuzzerCall(&__sanitizer_cov_trace_cmp2, uintptr(arg0), uintptr(arg1)) +} + +func libfuzzerTraceCmp4(arg0, arg1 uint32) { + libfuzzerCall(&__sanitizer_cov_trace_cmp4, uintptr(arg0), uintptr(arg1)) +} + +func libfuzzerTraceCmp8(arg0, arg1 uint64) { + libfuzzerCall(&__sanitizer_cov_trace_cmp8, uintptr(arg0), uintptr(arg1)) +} + +func libfuzzerTraceConstCmp1(arg0, arg1 uint8) { + libfuzzerCall(&__sanitizer_cov_trace_const_cmp1, uintptr(arg0), uintptr(arg1)) +} + +func libfuzzerTraceConstCmp2(arg0, arg1 uint16) { + libfuzzerCall(&__sanitizer_cov_trace_const_cmp2, uintptr(arg0), uintptr(arg1)) +} + +func libfuzzerTraceConstCmp4(arg0, arg1 uint32) { + libfuzzerCall(&__sanitizer_cov_trace_const_cmp4, uintptr(arg0), uintptr(arg1)) +} + +func libfuzzerTraceConstCmp8(arg0, arg1 uint64) { + libfuzzerCall(&__sanitizer_cov_trace_const_cmp8, uintptr(arg0), uintptr(arg1)) +} + +//go:linkname __sanitizer_cov_trace_cmp1 __sanitizer_cov_trace_cmp1 +//go:cgo_import_static __sanitizer_cov_trace_cmp1 +var __sanitizer_cov_trace_cmp1 byte + +//go:linkname __sanitizer_cov_trace_cmp2 __sanitizer_cov_trace_cmp2 +//go:cgo_import_static __sanitizer_cov_trace_cmp2 +var __sanitizer_cov_trace_cmp2 byte + +//go:linkname __sanitizer_cov_trace_cmp4 __sanitizer_cov_trace_cmp4 +//go:cgo_import_static __sanitizer_cov_trace_cmp4 +var __sanitizer_cov_trace_cmp4 byte 
+ +//go:linkname __sanitizer_cov_trace_cmp8 __sanitizer_cov_trace_cmp8 +//go:cgo_import_static __sanitizer_cov_trace_cmp8 +var __sanitizer_cov_trace_cmp8 byte + +//go:linkname __sanitizer_cov_trace_const_cmp1 __sanitizer_cov_trace_const_cmp1 +//go:cgo_import_static __sanitizer_cov_trace_const_cmp1 +var __sanitizer_cov_trace_const_cmp1 byte + +//go:linkname __sanitizer_cov_trace_const_cmp2 __sanitizer_cov_trace_const_cmp2 +//go:cgo_import_static __sanitizer_cov_trace_const_cmp2 +var __sanitizer_cov_trace_const_cmp2 byte + +//go:linkname __sanitizer_cov_trace_const_cmp4 __sanitizer_cov_trace_const_cmp4 +//go:cgo_import_static __sanitizer_cov_trace_const_cmp4 +var __sanitizer_cov_trace_const_cmp4 byte + +//go:linkname __sanitizer_cov_trace_const_cmp8 __sanitizer_cov_trace_const_cmp8 +//go:cgo_import_static __sanitizer_cov_trace_const_cmp8 +var __sanitizer_cov_trace_const_cmp8 byte diff --git a/src/runtime/libfuzzer_amd64.s b/src/runtime/libfuzzer_amd64.s new file mode 100644 index 0000000000..890fde341b --- /dev/null +++ b/src/runtime/libfuzzer_amd64.s @@ -0,0 +1,42 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build libfuzzer + +#include "go_asm.h" +#include "go_tls.h" +#include "textflag.h" + +// Based on race_amd64.s; see commentary there. + +#ifdef GOOS_windows +#define RARG0 CX +#define RARG1 DX +#else +#define RARG0 DI +#define RARG1 SI +#endif + +// void runtime·libfuzzerCall(fn, arg0, arg1 uintptr) +// Calls C function fn from libFuzzer and passes 2 arguments to it. +TEXT runtime·libfuzzerCall(SB), NOSPLIT, $0-24 + MOVQ fn+0(FP), AX + MOVQ arg0+8(FP), RARG0 + MOVQ arg1+16(FP), RARG1 + + get_tls(R12) + MOVQ g(R12), R14 + MOVQ g_m(R14), R13 + + // Switch to g0 stack. 
+ MOVQ SP, R12 // callee-saved, preserved across the CALL + MOVQ m_g0(R13), R10 + CMPQ R10, R14 + JE call // already on g0 + MOVQ (g_sched+gobuf_sp)(R10), SP +call: + ANDQ $~15, SP // alignment for gcc ABI + CALL AX + MOVQ R12, SP + RET diff --git a/src/runtime/libfuzzer_arm64.s b/src/runtime/libfuzzer_arm64.s new file mode 100644 index 0000000000..121673e092 --- /dev/null +++ b/src/runtime/libfuzzer_arm64.s @@ -0,0 +1,31 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build libfuzzer + +#include "go_asm.h" +#include "textflag.h" + +// Based on race_arm64.s; see commentary there. + +// func runtime·libfuzzerCall(fn, arg0, arg1 uintptr) +// Calls C function fn from libFuzzer and passes 2 arguments to it. +TEXT runtime·libfuzzerCall(SB), NOSPLIT, $0-24 + MOVD fn+0(FP), R9 + MOVD arg0+8(FP), R0 + MOVD arg1+16(FP), R1 + + MOVD g_m(g), R10 + + // Switch to g0 stack. + MOVD RSP, R19 // callee-saved, preserved across the CALL + MOVD m_g0(R10), R11 + CMP R11, g + BEQ call // already on g0 + MOVD (g_sched+gobuf_sp)(R11), R12 + MOVD R12, RSP +call: + BL R9 + MOVD R19, RSP + RET |
