Move core logic to LeakFinder struct

commit 87bc205a17
parent c6425e5699
Date:   2018-07-08 01:36:25 +01:00

6 changed files with 110 additions and 117 deletions


@@ -1,28 +0,0 @@
-package main
-
-// DiffSect removes all items in `a` from `b`, then removes all items from `b`
-// which are not in `c`. Effectively: intersect(difference(b, a), c)
-func DiffSect(a, b, c *[]string) *[]string {
-	result := []string{}
-
-	mapA := map[string]bool{}
-	mapC := map[string]bool{}
-
-	for _, x := range *a {
-		mapA[x] = true
-	}
-
-	for _, x := range *c {
-		mapC[x] = true
-	}
-
-	for _, x := range *b {
-		_, okA := mapA[x]
-		_, okC := mapC[x]
-		if !okA && okC {
-			result = append(result, x)
-		}
-	}
-
-	return &result
-}


@@ -1,34 +0,0 @@
-package main
-
-import (
-	"testing"
-
-	"github.com/stretchr/testify/assert"
-)
-
-func TestDiffSect(t *testing.T) {
-	tests := []struct {
-		a      []string
-		b      []string
-		c      []string
-		result []string
-	}{
-		{
-			a:      []string{"1", "2", "3"},
-			b:      []string{"1", "2", "3", "4", "5", "6"},
-			c:      []string{"1", "2", "3", "4", "5", "6", "7", "8", "9"},
-			result: []string{"4", "5", "6"},
-		},
-		{
-			a:      []string{"1", "2", "3"},
-			b:      []string{"1", "2", "4", "5", "6"},
-			c:      []string{"1", "5", "6", "7", "8", "9"},
-			result: []string{"5", "6"},
-		},
-	}
-
-	for _, pair := range tests {
-		result := DiffSect(&pair.a, &pair.b, &pair.c)
-		assert.Equal(t, *result, pair.result)
-	}
-}


@@ -18,3 +18,7 @@ type Entry struct {
 	Offset int64
 	Index  string
 }
+
+func (s *Entry) Address() string {
+	return s.Object.Address
+}

leak_finder.go (new file)

@@ -0,0 +1,77 @@
+package main
+
+import "fmt"
+
+func NewLeakFinder(file1, file2, file3 string) *LeakFinder {
+	return &LeakFinder{
+		FilePaths: [3]string{file1, file2, file3},
+	}
+}
+
+type LeakFinder struct {
+	FilePaths [3]string
+	Dumps     [3]*ObjectDump
+	Leaks     []*string
+	Verbose   bool
+}
+
+func (s *LeakFinder) Process() error {
+	for i, filePath := range s.FilePaths {
+		s.log(fmt.Sprintf("Parsing %s", filePath))
+		dump := NewObjectDump(filePath)
+		err := dump.Process()
+		if err != nil {
+			return err
+		}
+		s.Dumps[i] = dump
+		s.log(fmt.Sprintf("Parsed %d objects", len(dump.Index)))
+	}
+	return nil
+}
+
+func (s *LeakFinder) PrintLeakedAddresses() {
+	s.log("\nLeaked Addresses:")
+	s.Dumps[1].PrintEntryAddress(s.FindLeaks())
+}
+
+func (s *LeakFinder) PrintLeakedObjects() {
+	s.log("\nLeaked Objects:")
+	s.Dumps[1].PrintEntryJSON(s.FindLeaks())
+}
+
+func (s *LeakFinder) FindLeaks() []*string {
+	if s.Leaks != nil {
+		return s.Leaks
+	}
+
+	mapA := map[string]bool{}
+	mapC := map[string]bool{}
+
+	for _, x := range s.Dumps[0].Index {
+		mapA[*x] = true
+	}
+
+	for _, x := range s.Dumps[2].Index {
+		mapC[*x] = true
+	}
+
+	for _, x := range s.Dumps[1].Index {
+		_, okA := mapA[*x]
+		_, okC := mapC[*x]
+		if !okA && okC {
+			s.Leaks = append(s.Leaks, x)
+		}
+	}
+
+	return s.Leaks
+}
+
+func (s *LeakFinder) log(msg string) {
+	if s.Verbose {
+		fmt.Println(msg)
+	}
+}
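
For context, here is a minimal usage sketch of the new LeakFinder API introduced above. It is not part of the commit: the dump file names are placeholders, and main.go below wires the same calls to the real CLI flags. FindLeaks keeps the entries that appear in the second dump but not the first and are still present in the third, i.e. the same intersect(difference(b, a), c) logic that DiffSect used to implement.

package main

import "log"

// exampleLeakFinderUsage is a sketch only: it shows the intended flow of the
// new LeakFinder type. The dump file names are placeholders.
func exampleLeakFinderUsage() {
	finder := NewLeakFinder("heap1.json", "heap2.json", "heap3.json")
	finder.Verbose = true // main.go derives this from !*silentFlag
	if err := finder.Process(); err != nil { // parses each dump via ObjectDump
		log.Fatal(err)
	}
	finder.PrintLeakedAddresses() // addresses only; PrintLeakedObjects prints full JSON
}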

main.go

@@ -49,51 +49,21 @@ func versionString() string {
 	return buffer.String()
 }
 
-func logMsg(msg string) {
-	if !*silentFlag {
-		fmt.Println(msg)
-	}
-}
-
-func loadDump(filePath string) (*ObjectDump, error) {
-	logMsg(fmt.Sprintf("--> Loading %s...", filePath))
-	dump, err := NewObjectDump(filePath)
-	logMsg(fmt.Sprintf(" Loaded %d addresses", len(dump.Index)))
-	return dump, err
-}
-
-func printHexDiff(leaked *[]string, dump *ObjectDump) {
-	for _, index := range *leaked {
-		if entry, ok := dump.Entries[index]; ok {
-			fmt.Println(entry.Object.Address)
-		}
-	}
-}
-
 func main() {
 	kingpin.Version(versionString())
 	kingpin.Parse()
-	dump1, err := loadDump(*file1Path)
+	finder := NewLeakFinder(*file1Path, *file2Path, *file3Path)
+	finder.Verbose = !*silentFlag
+	err := finder.Process()
 	if err != nil {
 		log.Fatal(err)
 	}
-	dump2, err := loadDump(*file2Path)
-	if err != nil {
-		log.Fatal(err)
-	}
-	dump3, err := loadDump(*file3Path)
-	if err != nil {
-		log.Fatal(err)
-	}
-	leaked := DiffSect(&dump1.Index, &dump2.Index, &dump3.Index)
 	if *formatFlag == "hex" {
-		printHexDiff(leaked, dump2)
+		finder.PrintLeakedAddresses()
 	} else if *formatFlag == "full" {
-		dump2.PrintMatchingJSON(leaked)
+		finder.PrintLeakedObjects()
 	}
 }


@@ -8,16 +8,14 @@ import (
"sort"
)
func NewObjectDump(file string) (*ObjectDump, error) {
heapDump := ObjectDump{File: file}
err := heapDump.Process()
return &heapDump, err
func NewObjectDump(file string) *ObjectDump {
return &ObjectDump{File: file}
}
// ObjectDump contains all relevant data for a single heap dump.
type ObjectDump struct {
File string
Index []string
Index []*string
Entries map[string]*Entry
}
@@ -32,8 +30,8 @@ func (s *ObjectDump) Process() error {
 	s.Entries = map[string]*Entry{}
 
-	reader := bufio.NewReader(file)
 	var offset int64 = -1
+	reader := bufio.NewReader(file)
 
 	for {
 		offset++
 		line, err := reader.ReadBytes(byte('\n'))
@@ -50,13 +48,21 @@ func (s *ObjectDump) Process() error {
 		entry.Offset = offset
 		s.Entries[entry.Index] = entry
-		s.Index = append(s.Index, entry.Index)
+		s.Index = append(s.Index, &entry.Index)
 	}
 	return nil
 }
 
-func (s *ObjectDump) PrintMatchingJSON(indexes *[]string) error {
+func (s *ObjectDump) PrintEntryAddress(indexes []*string) {
+	for _, index := range indexes {
+		if entry, ok := s.Entries[*index]; ok {
+			fmt.Println(entry.Address())
+		}
+	}
+}
+
+func (s *ObjectDump) PrintEntryJSON(indexes []*string) error {
 	file, err := os.Open(s.File)
 	defer file.Close()
@@ -64,12 +70,10 @@ func (s *ObjectDump) PrintMatchingJSON(indexes *[]string) error {
 		return err
 	}
 
-	reader := bufio.NewReader(file)
-	offsets := s.matchingOffsets(indexes)
-	var current int64 = 0
+	offsets := s.sortedOffsets(indexes)
+	var current int64
 	var offset int64 = -1
+	reader := bufio.NewReader(file)
 
 	for {
 		offset++
 		line, err := reader.ReadBytes(byte('\n'))
@@ -88,13 +92,13 @@ func (s *ObjectDump) PrintMatchingJSON(indexes *[]string) error {
 	return nil
 }
 
-func (s *ObjectDump) matchingOffsets(indexes *[]string) []int64 {
-	var offsets []int64
-	for _, index := range *indexes {
-		offsets = append(offsets, s.Entries[index].Offset)
+func (s *ObjectDump) sortedOffsets(indexes []*string) []int64 {
+	var res []int64
+	for _, index := range indexes {
+		res = append(res, s.Entries[*index].Offset)
 	}
-	sort.Slice(offsets, func(i, j int) bool { return offsets[i] < offsets[j] })
-	return offsets
+	sort.Slice(res, func(i, j int) bool { return res[i] < res[j] })
+	return res
 }
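
One behavioural note worth calling out from the hunks above: NewObjectDump no longer parses the file in the constructor, so callers must invoke Process themselves (LeakFinder.Process does exactly that). A minimal sketch of the new two-step flow, assuming a placeholder path:

package main

import (
	"fmt"
	"log"
)

// exampleObjectDumpUsage is a sketch only: the constructor now just records the
// path, and Process does the parsing. "heap.json" is a placeholder file name.
func exampleObjectDumpUsage() {
	dump := NewObjectDump("heap.json")
	if err := dump.Process(); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("parsed %d objects\n", len(dump.Index)) // Index is now []*string
}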