Skip to content

Commit

Permalink
Merge pull request #39 from Halleck45/feat_fanout
Browse files Browse the repository at this point in the history
 step for fanIn/fanOut (parameters, external dependencies), and support non-utf8 classnames
  • Loading branch information
Halleck45 committed Mar 29, 2024
2 parents e3ee675 + e90ea8b commit 873c58a
Show file tree
Hide file tree
Showing 14 changed files with 1,336 additions and 589 deletions.
10 changes: 9 additions & 1 deletion proto/NodeType.proto
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ message Stmts {
repeated StmtDecisionCase stmtDecisionCase = 11;
repeated StmtLoop stmtLoop = 12;
repeated StmtDecisionSwitch stmtDecisionSwitch = 13;
repeated StmtExternalDependency stmtExternalDependencies = 14;
}

// Represents a file
Expand Down Expand Up @@ -92,9 +93,16 @@ message StmtFunction {
LinesOfCode linesOfCode = 9;
}

// Represents a Parameter node (for functions)
message StmtParameter {
string name = 1;
Name type = 2;
string type = 2;
}

// Represents an external dependency node. Used when code calls new X() or X::method(), for example.
message StmtExternalDependency {
string className = 1;
string functionName = 2;
}

// Represents an Interface node.
Expand Down
18 changes: 13 additions & 5 deletions src/Analyzer/Aggregator.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ type ProjectAggregated struct {
ByClass Aggregated
Combined Aggregated
ByProgrammingLanguage map[string]Aggregated
ErroredFiles []*pb.File
}

type Aggregated struct {
Expand Down Expand Up @@ -127,7 +128,6 @@ func newAggregated() Aggregated {
}

func (r *Aggregator) Aggregates() ProjectAggregated {
files := r.files

// We create a new aggregated object for each type of aggregation
// ByFile, ByClass, Combined
Expand All @@ -136,11 +136,19 @@ func (r *Aggregator) Aggregates() ProjectAggregated {
r.projectAggregated.Combined = newAggregated()

// Count files
r.projectAggregated.ByClass.NbFiles = len(files)
r.projectAggregated.ByFile.NbFiles = len(files)
r.projectAggregated.Combined.NbFiles = len(files)
r.projectAggregated.ByClass.NbFiles = len(r.files)
r.projectAggregated.ByFile.NbFiles = len(r.files)
r.projectAggregated.Combined.NbFiles = len(r.files)

for _, file := range files {
// Prepare errors
r.projectAggregated.ErroredFiles = make([]*pb.File, 0)

for _, file := range r.files {

// Files with errors
if file.Errors != nil && len(file.Errors) > 0 {
r.projectAggregated.ErroredFiles = append(r.projectAggregated.ErroredFiles, file)
}

if file.Stmts == nil {
continue
Expand Down
18 changes: 18 additions & 0 deletions src/Analyzer/Aggregator_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import (
"testing"

pb "github.com/halleck45/ast-metrics/src/NodeType"
"github.com/stretchr/testify/assert"
"google.golang.org/protobuf/proto"
)

Expand Down Expand Up @@ -514,3 +515,20 @@ func TestCalculateMaintainabilityIndex(t *testing.T) {
t.Errorf("Expected AverageMIPerMethod, got %f", aggregated.AverageMIPerMethod)
}
}

// TestFilesWithErrorAreDetected verifies that Aggregates() counts every file
// in ByFile.NbFiles but collects only the files carrying parse errors into
// ErroredFiles.
func TestFilesWithErrorAreDetected(t *testing.T) {
	aggregator := Aggregator{}
	// One healthy file (empty Stmts) and one errored file (non-empty Errors).
	// &pb.File is redundant inside a []*pb.File literal, so it is elided.
	files := []*pb.File{
		{
			Stmts: &pb.Stmts{},
		},
		{
			Errors: []string{"Error1", "Error2"},
		},
	}
	aggregator.files = files
	aggregated := aggregator.Aggregates()

	// Both files are counted, but only the one with errors is reported.
	assert.Equal(t, 2, aggregated.ByFile.NbFiles)
	assert.Equal(t, 1, len(aggregated.ErroredFiles))
}
5 changes: 0 additions & 5 deletions src/Analyzer/AstAnalyzer.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,6 @@ import (
"strconv"
"sync"

log "github.com/sirupsen/logrus"

Complexity "github.com/halleck45/ast-metrics/src/Analyzer/Complexity"
Component "github.com/halleck45/ast-metrics/src/Analyzer/Component"
Volume "github.com/halleck45/ast-metrics/src/Analyzer/Volume"
Expand Down Expand Up @@ -71,7 +69,6 @@ func executeFileAnalysis(file string, channelResult chan<- *pb.File) error {
// load AST via ProtoBuf (using NodeType package)
in, err := ioutil.ReadFile(file)
if err != nil {
log.Error("Error reading file: ", err)
if pbFile.Errors == nil {
pbFile.Errors = make([]string, 0)
}
Expand All @@ -82,7 +79,6 @@ func executeFileAnalysis(file string, channelResult chan<- *pb.File) error {

// if file is empty, return
if len(in) == 0 {
log.Error("File is empty: ", file)
if pbFile.Errors == nil {
pbFile.Errors = make([]string, 0)
}
Expand All @@ -92,7 +88,6 @@ func executeFileAnalysis(file string, channelResult chan<- *pb.File) error {
}

if err := proto.Unmarshal(in, pbFile); err != nil {
log.Errorln("Failed to parse address pbFile ("+file+"):", err)
if pbFile.Errors == nil {
pbFile.Errors = make([]string, 0)
}
Expand Down
6 changes: 3 additions & 3 deletions src/Analyzer/Volume/HalsteadMetricsVisitor_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -47,15 +47,15 @@ func TestHalsteadMetricsVisitor(t *testing.T) {
"parameters": [
{
"name": "a",
"type": {}
"type": ""
},
{
"name": "b",
"type": {}
"type": ""
},
{
"name": "c",
"type": {}
"type": ""
}
],
"linesOfCode": {
Expand Down
14 changes: 14 additions & 0 deletions src/Command/AnalyzeCommand.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import (
"github.com/halleck45/ast-metrics/src/Storage"
"github.com/inancgumus/screen"
"github.com/pterm/pterm"
log "github.com/sirupsen/logrus"
)

type AnalyzeCommand struct {
Expand Down Expand Up @@ -198,6 +199,19 @@ func (v *AnalyzeCommand) Execute() error {
v.currentPage.Reset(allResults, projectAggregated)
}

// Details errors
if len(projectAggregated.ErroredFiles) > 0 {
pterm.Info.Printf("%d files could not be analyzed. Use the --verbose option to get details\n", len(projectAggregated.ErroredFiles))
if log.GetLevel() == log.DebugLevel {
for _, file := range projectAggregated.ErroredFiles {
pterm.Error.Println("File " + file.Path)
for _, err := range file.Errors {
pterm.Error.Println(" " + err)
}
}
}
}

// Link to file watcher (in order to close it when the app is closed)
if v.FileWatcher != nil {
v.currentPage.FileWatcher = v.FileWatcher
Expand Down
7 changes: 4 additions & 3 deletions src/Engine/Golang/GolangRunner.go
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,10 @@ func (r GolangRunner) DumpAST() {
protoFile := ParseGoFile(filePath)

// Dump protobuf object to destination
Engine.DumpProtobuf(protoFile, binPath)
err = Engine.DumpProtobuf(protoFile, binPath)
if err != nil {
log.Error(err)
}
}

if r.progressbar != nil {
Expand Down Expand Up @@ -179,8 +182,6 @@ func ParseGoFile(filePath string) *pb.File {
return file
}



// getFileList returns the list of PHP files to analyze, and caches it in memory
func (r *GolangRunner) getFileList() File.FileList {

Expand Down
11 changes: 7 additions & 4 deletions src/Engine/Php/PhpRunner.go
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ func (r PhpRunner) dumpOneAst(wg *sync.WaitGroup, filePath string) {
defer wg.Done()
hash, err := Engine.GetFileHash(filePath)
if err != nil {
log.Error(err)
log.Error("Error while hashing file " + filePath + ": " + err.Error())
}
binPath := Storage.OutputPath() + string(os.PathSeparator) + hash + ".bin"
// if file exists, skip it
Expand All @@ -94,7 +94,10 @@ func (r PhpRunner) dumpOneAst(wg *sync.WaitGroup, filePath string) {
protoFile, _ := parsePhpFile(filePath)

// Dump protobuf object to destination
Engine.DumpProtobuf(protoFile, binPath)
err = Engine.DumpProtobuf(protoFile, binPath)
if err != nil {
log.Error("Error while dumping file " + filePath + ": " + err.Error())
}
}

func parsePhpFile(filename string) (*pb.File, error) {
Expand Down Expand Up @@ -129,12 +132,12 @@ func parsePhpFile(filename string) (*pb.File, error) {
})

if err != nil {
log.Error("Error:" + err.Error())
parserErrors = append(parserErrors, errors.NewError(err.Error(), nil))
}

if len(parserErrors) > 0 {
for _, e := range parserErrors {
log.Println(e.String())
file.Errors = append(file.Errors, e.Msg)
}
}

Expand Down

0 comments on commit 873c58a

Please sign in to comment.