Initial hacky version of inspect source command

This commit is contained in:
2018-07-13 01:56:17 +01:00
parent 7cf43c59dd
commit eff9aa36f6
10 changed files with 309 additions and 635 deletions

View File

@@ -1,39 +1,25 @@
package cmd
import (
"fmt"
"github.com/jimeh/rbheap/inspect"
"github.com/spf13/cobra"
)
// inspectCmd represents the inspect command
var inspectCmd = &cobra.Command{
Use: "inspect [flags] <dump-file>",
Use: "inspect",
Short: "Inspect ObjectSpace dumps from Ruby processes",
Run: func(cmd *cobra.Command, args []string) {
if len(args) != 1 {
usage_er(cmd, fmt.Sprintf("requires 1 arg, received %d", len(args)))
}
inspector := inspect.New(args[0])
inspector.Verbose = inspectOpts.Verbose
inspector.Process()
inspector.PrintCountByFileAndLines()
},
}
var inspectOpts = struct {
Verbose bool
Output string
}{}
func init() {
rootCmd.AddCommand(inspectCmd)
inspectCmd.PersistentFlags().BoolVarP(
&inspectOpts.Verbose,
inspectCmd.PersistentFlags().BoolVarP(&inspectOpts.Verbose,
"verbose", "v", false,
"print verbose information",
"print verbose information to STDERR",
)
}

60
cmd/inspectSource.go Normal file
View File

@@ -0,0 +1,60 @@
package cmd
import (
"fmt"
"os"
"github.com/jimeh/rbheap/inspect"
"github.com/spf13/cobra"
)
// inspectSourceCmd represents the "inspect source" sub-command. It groups
// heap-dump objects by the source file (and line) that allocated them and
// prints the aggregated results to STDOUT.
var inspectSourceCmd = &cobra.Command{
	Use:   "source [flags] <dump-file>",
	Short: "Group objects by source filename and line number",
	Run: func(cmd *cobra.Command, args []string) {
		if len(args) != 1 {
			usage_er(cmd, fmt.Sprintf("requires 1 arg, received %d", len(args)))
			// Guard the args[0] access below in case usage_er does not
			// terminate the process.
			return
		}

		inspector := inspect.NewSourceInspector(args[0])
		inspector.Verbose = inspectOpts.Verbose
		inspector.SortBy = inspectSourceOpts.SortBy
		inspector.Limit = inspectSourceOpts.Limit

		// Load returns an error (e.g. unreadable dump file); the original
		// code ignored it and would silently report zero objects.
		if err := inspector.Load(); err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}

		switch inspectSourceOpts.Breakdown {
		case "file":
			inspector.ByFile(os.Stdout)
		case "line":
			inspector.ByLine(os.Stdout)
		default:
			usage_er(cmd, "Invalid --breakdown option")
		}
	},
}
// inspectSourceOpts holds the flag values for the "inspect source"
// sub-command; it is populated by the flag registrations in init.
var inspectSourceOpts = struct {
	Breakdown string // group output by "line" or "file"
	SortBy    string // sort key: "count", "memsize", or "bytesize"
	Limit     int    // maximum number of results to print; 0 means unlimited
}{}
// init attaches the "source" sub-command to the parent "inspect" command
// and registers its command-line flags.
func init() {
	inspectCmd.AddCommand(inspectSourceCmd)

	flags := inspectSourceCmd.Flags()
	flags.StringVarP(
		&inspectSourceOpts.Breakdown, "breakdown", "b", "line",
		"Breakdown sources by \"line\" or \"file\"",
	)
	flags.StringVarP(
		&inspectSourceOpts.SortBy, "sort", "s", "count",
		"Sort by \"count\", \"memsize\", or \"bytesize\"",
	)
	flags.IntVarP(
		&inspectSourceOpts.Limit, "limit", "l", 0,
		"Limit number of results to show",
	)
}

52
inspect/base_inspector.go Normal file
View File

@@ -0,0 +1,52 @@
package inspect
import (
"fmt"
"io"
"os"
"time"
)
// BaseInspector holds the state shared by inspector types: the dump file
// path, the parsed Dump, and verbose-logging configuration. It is intended
// to be embedded (see SourceInspector).
type BaseInspector struct {
	FilePath      string    // path to the heap dump file
	Dump          *Dump     // parsed dump; populated by Load
	Verbose       bool      // when true, log/verbose output is emitted
	VerboseWriter io.Writer // destination for verbose output; nil means os.Stderr
}
// Load reads and parses the heap dump at FilePath into s.Dump, emitting
// timing information via verbose when Verbose is enabled. It returns any
// error from the underlying Dump.Load.
//
// NOTE(review): this method has a *SourceInspector receiver but lives in
// the base-inspector file — confirm whether it should move or take a
// *BaseInspector receiver.
func (s *SourceInspector) Load() error {
	start := time.Now()
	s.verbose(fmt.Sprintf("Loading %s...", s.FilePath))

	if err := s.Dump.Load(); err != nil {
		return err
	}

	// time.Since is the idiomatic form of time.Now().Sub(start).
	elapsed := time.Since(start)
	s.verbose(fmt.Sprintf(
		"Loaded %d objects in %.6f seconds",
		len(s.Dump.Objects),
		elapsed.Seconds(),
	))

	return nil
}
// log prints msg to STDOUT, but only when Verbose mode is enabled.
func (s *BaseInspector) log(msg string) {
	if !s.Verbose {
		return
	}
	fmt.Println(msg)
}
// verbose writes msg to VerboseWriter — defaulting to STDERR when no writer
// is set — but only when Verbose mode is enabled.
func (s *BaseInspector) verbose(msg string) {
	if !s.Verbose {
		return
	}
	out := s.VerboseWriter
	if out == nil {
		out = os.Stderr
	}
	fmt.Fprintln(out, msg)
}

View File

@@ -4,7 +4,6 @@ import (
"bufio"
"io"
"os"
"strconv"
)
func NewDump(filePath string) *Dump {
@@ -14,14 +13,14 @@ func NewDump(filePath string) *Dump {
// Dump contains all relevant data for a single heap dump.
type Dump struct {
FilePath string
ByAddress map[string]*Object
Objects map[string]*Object
ByFile map[string][]*Object
ByFileAndLine map[string][]*Object
ByGeneration map[int][]*Object
}
// Process processes the heap dump referenced in FilePath.
func (s *Dump) Process() error {
// Load processes the heap dump referenced in FilePath.
func (s *Dump) Load() error {
file, err := os.Open(s.FilePath)
defer file.Close()
@@ -29,10 +28,7 @@ func (s *Dump) Process() error {
return err
}
s.ByAddress = map[string]*Object{}
s.ByFile = map[string][]*Object{}
s.ByFileAndLine = map[string][]*Object{}
s.ByGeneration = map[int][]*Object{}
s.Objects = map[string]*Object{}
reader := bufio.NewReader(file)
for {
@@ -54,21 +50,12 @@ func (s *Dump) Process() error {
return nil
}
// Lookup returns the object stored at the given heap address, along with a
// boolean indicating whether the address was present in the dump.
func (s *Dump) Lookup(address string) (*Object, bool) {
	obj, found := s.Objects[address]
	return obj, found
}
// AddObject adds a *Object to the Dump.
func (s *Dump) AddObject(obj *Object) {
s.ByAddress[obj.Address] = obj
if obj.File != "" {
s.ByFile[obj.File] = append(s.ByFile[obj.File], obj)
}
if obj.File != "" && obj.Line != 0 {
key := obj.File + ":" + strconv.Itoa(obj.Line)
s.ByFileAndLine[key] = append(s.ByFileAndLine[key], obj)
}
if obj.Generation != 0 {
s.ByGeneration[obj.Generation] =
append(s.ByGeneration[obj.Generation], obj)
}
s.Objects[obj.Address] = obj
}

30
inspect/file.go Normal file
View File

@@ -0,0 +1,30 @@
package inspect
// NewFile creates a new File for the given source file path, with an empty
// object map ready for Add calls.
func NewFile(filePath string) *File {
	f := &File{FilePath: filePath}
	f.ObjectMap = map[string]*Object{}
	return f
}
// File represents a source file and the lines and objects allocated by them.
type File struct {
	FilePath    string             // path of the source file
	ObjectMap   map[string]*Object // objects seen so far, keyed by address (for dedup)
	ObjectCount int                // number of unique objects allocated by this file
	ByteSize    int64              // running sum of the objects' bytesize values
	MemSize     int64              // running sum of the objects' memsize values
}
// Add records obj against the file, deduplicating by object address and
// accumulating the object count, byte size, and memory size totals.
//
// NOTE(review): objects with Line == 0 are skipped even though File groups
// by file only — confirm that file-level stats should really exclude
// line-less allocations.
func (s *File) Add(obj *Object) {
	if obj.File == "" || obj.Line == 0 {
		return
	}
	if _, ok := s.ObjectMap[obj.Address]; ok {
		return // already counted this object
	}

	s.ObjectCount++
	s.ByteSize += obj.ByteSize
	s.MemSize += obj.MemSize
	s.ObjectMap[obj.Address] = obj
}

View File

@@ -1,47 +0,0 @@
package inspect
import (
"fmt"
"time"
)
func New(filePath string) *Inspector {
return &Inspector{
FilePath: filePath,
Dump: NewDump(filePath),
}
}
type Inspector struct {
FilePath string
Dump *Dump
Verbose bool
}
func (s *Inspector) Process() {
start := time.Now()
s.log(fmt.Sprintf("Parsing %s", s.FilePath))
s.Dump.Process()
elapsed := time.Now().Sub(start)
s.log(fmt.Sprintf(
"Parsed %d objects in %.6f seconds",
len(s.Dump.ByAddress),
elapsed.Seconds(),
))
}
func (s *Inspector) PrintCountByFileAndLines() {
for k, objects := range s.Dump.ByFileAndLine {
fmt.Printf("%s: %d objects\n", k, len(objects))
}
}
func (s *Inspector) log(msg string) {
if s.Verbose {
fmt.Println(msg)
}
}

32
inspect/line.go Normal file
View File

@@ -0,0 +1,32 @@
package inspect
// NewLine creates a new Line for the given source file path and line
// number, with an empty object map ready for Add calls.
func NewLine(filePath string, lineNum int) *Line {
	l := &Line{FilePath: filePath, LineNum: lineNum}
	l.ObjectMap = map[string]*Object{}
	return l
}
// Line represents a source line within a file and the objects allocated by it.
type Line struct {
	FilePath    string             // path of the source file
	LineNum     int                // line number within FilePath
	ObjectMap   map[string]*Object // objects seen so far, keyed by address (for dedup)
	ObjectCount int                // number of unique objects allocated by this line
	ByteSize    int64              // running sum of the objects' bytesize values
	MemSize     int64              // running sum of the objects' memsize values
}
// Add records obj against the line, deduplicating by object address and
// accumulating the object count, byte size, and memory size totals.
// Objects with no source file or line are ignored.
func (s *Line) Add(obj *Object) {
	if obj.File == "" || obj.Line == 0 {
		return
	}
	if _, ok := s.ObjectMap[obj.Address]; ok {
		return // already counted this object
	}

	s.ObjectCount++
	s.ByteSize += obj.ByteSize
	s.MemSize += obj.MemSize
	s.ObjectMap[obj.Address] = obj
}

View File

@@ -17,7 +17,7 @@ func NewObject(inputJSON []byte) (*Object, error) {
// `ObjectSpace.dump_all`.
type Object struct {
Address string `json:"address"`
Bytesize int `json:"bytesize"`
ByteSize int64 `json:"bytesize"`
Capacity int `json:"capacity"`
Class string `json:"class"`
Default string `json:"default"`
@@ -31,15 +31,15 @@ type Object struct {
Generation int `json:"generation"`
ImemoType string `json:"imemo_type"`
Ivars int `json:"ivars"`
Length int `json:"length"`
Length int64 `json:"length"`
Line int `json:"line"`
Memsize int `json:"memsize"`
MemSize int64 `json:"memsize"`
Method string `json:"method"`
Name string `json:"name"`
References ObjectReferences `json:"references"`
Root string `json:"root"`
Shared bool `json:"shared"`
Size int `json:"size"`
Size int64 `json:"size"`
Struct string `json:"struct"`
Type string `json:"type"`
Value string `json:"value"`

View File

@@ -1,543 +0,0 @@
// Code generated by easyjson for marshaling/unmarshaling. DO NOT EDIT.
package inspect
import (
json "encoding/json"
easyjson "github.com/mailru/easyjson"
jlexer "github.com/mailru/easyjson/jlexer"
jwriter "github.com/mailru/easyjson/jwriter"
)
// suppress unused package warning
var (
_ *json.RawMessage
_ *jlexer.Lexer
_ *jwriter.Writer
_ easyjson.Marshaler
)
func easyjsonE44bcf2dDecodeGithubComJimehRbheapInspect(in *jlexer.Lexer, out *ObjectFlags) {
isTopLevel := in.IsStart()
if in.IsNull() {
if isTopLevel {
in.Consumed()
}
in.Skip()
return
}
in.Delim('{')
for !in.IsDelim('}') {
key := in.UnsafeString()
in.WantColon()
if in.IsNull() {
in.Skip()
in.WantComma()
continue
}
switch key {
case "marked":
out.Marked = bool(in.Bool())
case "old":
out.Old = bool(in.Bool())
case "uncollectible":
out.Uncollectible = bool(in.Bool())
case "wb_protected":
out.WbProtected = bool(in.Bool())
default:
in.SkipRecursive()
}
in.WantComma()
}
in.Delim('}')
if isTopLevel {
in.Consumed()
}
}
func easyjsonE44bcf2dEncodeGithubComJimehRbheapInspect(out *jwriter.Writer, in ObjectFlags) {
out.RawByte('{')
first := true
_ = first
{
const prefix string = ",\"marked\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Bool(bool(in.Marked))
}
{
const prefix string = ",\"old\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Bool(bool(in.Old))
}
{
const prefix string = ",\"uncollectible\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Bool(bool(in.Uncollectible))
}
{
const prefix string = ",\"wb_protected\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Bool(bool(in.WbProtected))
}
out.RawByte('}')
}
// MarshalJSON supports json.Marshaler interface
func (v ObjectFlags) MarshalJSON() ([]byte, error) {
w := jwriter.Writer{}
easyjsonE44bcf2dEncodeGithubComJimehRbheapInspect(&w, v)
return w.Buffer.BuildBytes(), w.Error
}
// MarshalEasyJSON supports easyjson.Marshaler interface
func (v ObjectFlags) MarshalEasyJSON(w *jwriter.Writer) {
easyjsonE44bcf2dEncodeGithubComJimehRbheapInspect(w, v)
}
// UnmarshalJSON supports json.Unmarshaler interface
func (v *ObjectFlags) UnmarshalJSON(data []byte) error {
r := jlexer.Lexer{Data: data}
easyjsonE44bcf2dDecodeGithubComJimehRbheapInspect(&r, v)
return r.Error()
}
// UnmarshalEasyJSON supports easyjson.Unmarshaler interface
func (v *ObjectFlags) UnmarshalEasyJSON(l *jlexer.Lexer) {
easyjsonE44bcf2dDecodeGithubComJimehRbheapInspect(l, v)
}
func easyjsonE44bcf2dDecodeGithubComJimehRbheapInspect1(in *jlexer.Lexer, out *Object) {
isTopLevel := in.IsStart()
if in.IsNull() {
if isTopLevel {
in.Consumed()
}
in.Skip()
return
}
in.Delim('{')
for !in.IsDelim('}') {
key := in.UnsafeString()
in.WantColon()
if in.IsNull() {
in.Skip()
in.WantComma()
continue
}
switch key {
case "address":
out.Address = string(in.String())
case "bytesize":
out.Bytesize = int(in.Int())
case "capacity":
out.Capacity = int(in.Int())
case "class":
out.Class = string(in.String())
case "default":
out.Default = string(in.String())
case "embedded":
out.Embedded = bool(in.Bool())
case "encoding":
out.Encoding = string(in.String())
case "fd":
out.Fd = int(in.Int())
case "file":
out.File = string(in.String())
case "flags":
if data := in.Raw(); in.Ok() {
in.AddError((out.Flags).UnmarshalJSON(data))
}
case "frozen":
out.Frozen = bool(in.Bool())
case "fstring":
out.Fstring = bool(in.Bool())
case "generation":
out.Generation = int(in.Int())
case "imemo_type":
out.ImemoType = string(in.String())
case "ivars":
out.Ivars = int(in.Int())
case "length":
out.Length = int(in.Int())
case "line":
out.Line = int(in.Int())
case "memsize":
out.Memsize = int(in.Int())
case "method":
out.Method = string(in.String())
case "name":
out.Name = string(in.String())
case "references":
if in.IsNull() {
in.Skip()
out.References = nil
} else {
in.Delim('[')
if out.References == nil {
if !in.IsDelim(']') {
out.References = make(ObjectReferences, 0, 4)
} else {
out.References = ObjectReferences{}
}
} else {
out.References = (out.References)[:0]
}
for !in.IsDelim(']') {
var v1 string
v1 = string(in.String())
out.References = append(out.References, v1)
in.WantComma()
}
in.Delim(']')
}
case "root":
out.Root = string(in.String())
case "shared":
out.Shared = bool(in.Bool())
case "size":
out.Size = int(in.Int())
case "struct":
out.Struct = string(in.String())
case "type":
out.Type = string(in.String())
case "value":
out.Value = string(in.String())
default:
in.SkipRecursive()
}
in.WantComma()
}
in.Delim('}')
if isTopLevel {
in.Consumed()
}
}
func easyjsonE44bcf2dEncodeGithubComJimehRbheapInspect1(out *jwriter.Writer, in Object) {
out.RawByte('{')
first := true
_ = first
{
const prefix string = ",\"address\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.String(string(in.Address))
}
{
const prefix string = ",\"bytesize\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Int(int(in.Bytesize))
}
{
const prefix string = ",\"capacity\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Int(int(in.Capacity))
}
{
const prefix string = ",\"class\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.String(string(in.Class))
}
{
const prefix string = ",\"default\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.String(string(in.Default))
}
{
const prefix string = ",\"embedded\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Bool(bool(in.Embedded))
}
{
const prefix string = ",\"encoding\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.String(string(in.Encoding))
}
{
const prefix string = ",\"fd\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Int(int(in.Fd))
}
{
const prefix string = ",\"file\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.String(string(in.File))
}
{
const prefix string = ",\"flags\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Raw((in.Flags).MarshalJSON())
}
{
const prefix string = ",\"frozen\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Bool(bool(in.Frozen))
}
{
const prefix string = ",\"fstring\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Bool(bool(in.Fstring))
}
{
const prefix string = ",\"generation\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Int(int(in.Generation))
}
{
const prefix string = ",\"imemo_type\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.String(string(in.ImemoType))
}
{
const prefix string = ",\"ivars\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Int(int(in.Ivars))
}
{
const prefix string = ",\"length\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Int(int(in.Length))
}
{
const prefix string = ",\"line\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Int(int(in.Line))
}
{
const prefix string = ",\"memsize\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Int(int(in.Memsize))
}
{
const prefix string = ",\"method\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.String(string(in.Method))
}
{
const prefix string = ",\"name\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.String(string(in.Name))
}
{
const prefix string = ",\"references\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
if in.References == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 {
out.RawString("null")
} else {
out.RawByte('[')
for v2, v3 := range in.References {
if v2 > 0 {
out.RawByte(',')
}
out.String(string(v3))
}
out.RawByte(']')
}
}
{
const prefix string = ",\"root\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.String(string(in.Root))
}
{
const prefix string = ",\"shared\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Bool(bool(in.Shared))
}
{
const prefix string = ",\"size\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Int(int(in.Size))
}
{
const prefix string = ",\"struct\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.String(string(in.Struct))
}
{
const prefix string = ",\"type\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.String(string(in.Type))
}
{
const prefix string = ",\"value\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.String(string(in.Value))
}
out.RawByte('}')
}
// MarshalJSON supports json.Marshaler interface
func (v Object) MarshalJSON() ([]byte, error) {
w := jwriter.Writer{}
easyjsonE44bcf2dEncodeGithubComJimehRbheapInspect1(&w, v)
return w.Buffer.BuildBytes(), w.Error
}
// MarshalEasyJSON supports easyjson.Marshaler interface
func (v Object) MarshalEasyJSON(w *jwriter.Writer) {
easyjsonE44bcf2dEncodeGithubComJimehRbheapInspect1(w, v)
}
// UnmarshalJSON supports json.Unmarshaler interface
func (v *Object) UnmarshalJSON(data []byte) error {
r := jlexer.Lexer{Data: data}
easyjsonE44bcf2dDecodeGithubComJimehRbheapInspect1(&r, v)
return r.Error()
}
// UnmarshalEasyJSON supports easyjson.Unmarshaler interface
func (v *Object) UnmarshalEasyJSON(l *jlexer.Lexer) {
easyjsonE44bcf2dDecodeGithubComJimehRbheapInspect1(l, v)
}

117
inspect/source_inspector.go Normal file
View File

@@ -0,0 +1,117 @@
package inspect
import (
"fmt"
"io"
"sort"
)
// NewSourceInspector creates a new SourceInspector for the dump file at
// filePath.
func NewSourceInspector(filePath string) *SourceInspector {
	base := BaseInspector{
		FilePath: filePath,
		Dump:     NewDump(filePath),
	}
	return &SourceInspector{BaseInspector: base}
}
// SourceInspector inspects memory dumps based on the file and line that
// allocated the memory.
type SourceInspector struct {
	BaseInspector
	SortBy  string // sort key: "memsize", "bytesize", or object count (default)
	Limit   int    // maximum number of results to print; 0 means unlimited
	FileMap map[string]*File // NOTE(review): appears unused in this file — confirm before removing
}
// ByFile groups the dump's objects by source file, sorts the groups
// according to SortBy ("memsize", "bytesize", or object count by default),
// and writes up to Limit results (0 = unlimited) to w.
func (s *SourceInspector) ByFile(w io.Writer) {
	fileMap := map[string]*File{}
	files := []*File{}
	for _, obj := range s.Dump.Objects {
		// Skip objects with no allocation source; they previously produced
		// a bogus empty-path entry with zero counts in the output.
		if obj.File == "" {
			continue
		}
		file, ok := fileMap[obj.File]
		if !ok {
			file = NewFile(obj.File)
			files = append(files, file)
			fileMap[obj.File] = file
		}
		file.Add(obj)
	}

	switch s.SortBy {
	case "memsize":
		sort.Slice(files, func(i, j int) bool {
			return files[i].MemSize > files[j].MemSize
		})
	case "bytesize":
		sort.Slice(files, func(i, j int) bool {
			return files[i].ByteSize > files[j].ByteSize
		})
	default:
		sort.Slice(files, func(i, j int) bool {
			return files[i].ObjectCount > files[j].ObjectCount
		})
	}

	for i, file := range files {
		fmt.Fprintf(w,
			"%s (objects: %d, bytesize: %d, memsize: %d)\n",
			file.FilePath,
			file.ObjectCount,
			file.ByteSize,
			file.MemSize,
		)
		// i+1 is the number of results printed so far; the original mutated
		// the range variable (i++), which range resets every iteration.
		if s.Limit != 0 && i+1 >= s.Limit {
			break
		}
	}
}
// ByLine groups the dump's objects by source file AND line number, sorts
// the groups according to SortBy ("memsize", "bytesize", or object count by
// default), and writes up to Limit results (0 = unlimited) to w.
func (s *SourceInspector) ByLine(w io.Writer) {
	lineMap := map[string]*Line{}
	lines := []*Line{}
	for _, obj := range s.Dump.Objects {
		// Skip objects with no allocation source; they previously produced
		// a bogus empty-path entry with zero counts in the output.
		if obj.File == "" {
			continue
		}
		// BUG FIX: the map must be keyed by file AND line; keying by
		// obj.File alone collapsed every line of a file into a single Line
		// entry (the first line seen for that file).
		key := fmt.Sprintf("%s:%d", obj.File, obj.Line)
		line, ok := lineMap[key]
		if !ok {
			line = NewLine(obj.File, obj.Line)
			lines = append(lines, line)
			lineMap[key] = line
		}
		line.Add(obj)
	}

	switch s.SortBy {
	case "memsize":
		sort.Slice(lines, func(i, j int) bool {
			return lines[i].MemSize > lines[j].MemSize
		})
	case "bytesize":
		sort.Slice(lines, func(i, j int) bool {
			return lines[i].ByteSize > lines[j].ByteSize
		})
	default:
		sort.Slice(lines, func(i, j int) bool {
			return lines[i].ObjectCount > lines[j].ObjectCount
		})
	}

	for i, line := range lines {
		fmt.Fprintf(w,
			"%s:%d (objects: %d, bytesize: %d, memsize: %d)\n",
			line.FilePath,
			line.LineNum,
			line.ObjectCount,
			line.ByteSize,
			line.MemSize,
		)
		// i+1 is the number of results printed so far; the original mutated
		// the range variable (i++), which range resets every iteration.
		if s.Limit != 0 && i+1 >= s.Limit {
			break
		}
	}
}