summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--  src/runtime/mgc0.c              3
-rw-r--r--  src/runtime/mprof.go            2
-rw-r--r--  src/runtime/pprof/mprof_test.go 146
-rw-r--r--  src/testing/testing.go          1
4 files changed, 150 insertions, 2 deletions
diff --git a/src/runtime/mgc0.c b/src/runtime/mgc0.c
index 05cabe708..2ff64aaa3 100644
--- a/src/runtime/mgc0.c
+++ b/src/runtime/mgc0.c
@@ -1507,7 +1507,6 @@ gc(struct gc_args *args)
runtime·sweep.spanidx = 0;
runtime·unlock(&runtime·mheap.lock);
- // Temporary disable concurrent sweep, because we see failures on builders.
if(ConcurrentSweep && !args->eagersweep) {
runtime·lock(&runtime·gclock);
if(runtime·sweep.g == nil)
@@ -1521,6 +1520,8 @@ gc(struct gc_args *args)
// Sweep all spans eagerly.
while(runtime·sweepone() != -1)
runtime·sweep.npausesweep++;
+ // Do an additional mProf_GC, because all 'free' events are now real as well.
+ runtime·mProf_GC();
}
runtime·mProf_GC();
diff --git a/src/runtime/mprof.go b/src/runtime/mprof.go
index 89e991523..f4676fad6 100644
--- a/src/runtime/mprof.go
+++ b/src/runtime/mprof.go
@@ -234,7 +234,7 @@ func mProf_GC() {
// Called by malloc to record a profiled block.
func mProf_Malloc(p unsafe.Pointer, size uintptr) {
var stk [maxStack]uintptr
- nstk := callers(1, &stk[0], len(stk))
+ nstk := callers(4, &stk[0], len(stk))
lock(&proflock)
b := stkbucket(memProfile, size, stk[:nstk], true)
mp := b.mp()
diff --git a/src/runtime/pprof/mprof_test.go b/src/runtime/pprof/mprof_test.go
new file mode 100644
index 000000000..1880b9a70
--- /dev/null
+++ b/src/runtime/pprof/mprof_test.go
@@ -0,0 +1,146 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pprof_test
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "os/exec"
+ "regexp"
+ "runtime"
+ . "runtime/pprof"
+ "testing"
+ "unsafe"
+)
+
+var memSink interface{}
+
+func allocateTransient1M() {
+ for i := 0; i < 1024; i++ {
+ memSink = &struct{ x [1024]byte }{}
+ }
+}
+
+func allocateTransient2M() {
+ // prevent inlining
+ if memSink == nil {
+ panic("bad")
+ }
+ memSink = make([]byte, 2<<20)
+}
+
+type Obj32 struct {
+ link *Obj32
+ pad [32 - unsafe.Sizeof(uintptr(0))]byte
+}
+
+var persistentMemSink *Obj32
+
+func allocatePersistent1K() {
+ for i := 0; i < 32; i++ {
+ // Can't use slice because that will introduce implicit allocations.
+ obj := &Obj32{link: persistentMemSink}
+ persistentMemSink = obj
+ }
+}
+
+var memoryProfilerRun = 0
+
+func TestMemoryProfiler(t *testing.T) {
+ // Create temp file for the profile.
+ f, err := ioutil.TempFile("", "memprof")
+ if err != nil {
+ t.Fatalf("failed to create temp file: %v", err)
+ }
+ defer func() {
+ f.Close()
+ os.Remove(f.Name())
+ }()
+
+ // Disable sampling, otherwise it's difficult to assert anything.
+ oldRate := runtime.MemProfileRate
+ runtime.MemProfileRate = 1
+ defer func() {
+ runtime.MemProfileRate = oldRate
+ }()
+ // Allocate a meg to ensure that mcache.next_sample is updated to 1.
+ for i := 0; i < 1024; i++ {
+ memSink = make([]byte, 1024)
+ }
+
+ // Do the interesting allocations.
+ allocateTransient1M()
+ allocateTransient2M()
+ allocatePersistent1K()
+ memSink = nil
+
+ runtime.GC() // materialize stats
+ if err := WriteHeapProfile(f); err != nil {
+ t.Fatalf("failed to write heap profile: %v", err)
+ }
+ f.Close()
+
+ memoryProfilerRun++
+ checkMemProfile(t, f.Name(), []string{"--alloc_space", "--show_bytes", "--lines"}, []string{
+ fmt.Sprintf(`%v .* runtime/pprof_test\.allocateTransient1M .*mprof_test.go:25`, 1<<20*memoryProfilerRun),
+ fmt.Sprintf(`%v .* runtime/pprof_test\.allocateTransient2M .*mprof_test.go:34`, 2<<20*memoryProfilerRun),
+ fmt.Sprintf(`%v .* runtime/pprof_test\.allocatePersistent1K .*mprof_test.go:47`, 1<<10*memoryProfilerRun),
+ }, []string{})
+
+ checkMemProfile(t, f.Name(), []string{"--inuse_space", "--show_bytes", "--lines"}, []string{
+ fmt.Sprintf(`%v .* runtime/pprof_test\.allocatePersistent1K .*mprof_test.go:47`, 1<<10*memoryProfilerRun),
+ }, []string{
+ "allocateTransient1M",
+ "allocateTransient2M",
+ })
+}
+
+func checkMemProfile(t *testing.T, file string, addArgs []string, what []string, whatnot []string) {
+ args := []string{"tool", "pprof", "--text"}
+ args = append(args, addArgs...)
+ args = append(args, os.Args[0], file)
+ out, err := exec.Command("go", args...).CombinedOutput()
+ if err != nil {
+ t.Fatalf("failed to execute pprof: %v\n%v\n", err, string(out))
+ }
+
+ matched := make(map[*regexp.Regexp]bool)
+ for _, s := range what {
+ matched[regexp.MustCompile(s)] = false
+ }
+ var not []*regexp.Regexp
+ for _, s := range whatnot {
+ not = append(not, regexp.MustCompile(s))
+ }
+
+ s := bufio.NewScanner(bytes.NewReader(out))
+ for s.Scan() {
+ ln := s.Text()
+ for re := range matched {
+ if re.MatchString(ln) {
+ if matched[re] {
+ t.Errorf("entry '%s' is matched twice", re.String())
+ }
+ matched[re] = true
+ }
+ }
+ for _, re := range not {
+ if re.MatchString(ln) {
+ t.Errorf("entry '%s' is matched, but must not", re.String())
+ }
+ }
+ }
+ for re, ok := range matched {
+ if !ok {
+ t.Errorf("entry '%s' is not matched", re.String())
+ }
+ }
+ if t.Failed() {
+ t.Logf("profile:\n%v", string(out))
+ }
+}
diff --git a/src/testing/testing.go b/src/testing/testing.go
index f91d860a9..e54a3b8ce 100644
--- a/src/testing/testing.go
+++ b/src/testing/testing.go
@@ -620,6 +620,7 @@ func after() {
fmt.Fprintf(os.Stderr, "testing: %s\n", err)
os.Exit(2)
}
+ runtime.GC() // materialize all statistics
if err = pprof.WriteHeapProfile(f); err != nil {
fmt.Fprintf(os.Stderr, "testing: can't write %s: %s\n", *memProfile, err)
os.Exit(2)