
Adding logic to support Python 3.x

git-svn-id: https://software.sandia.gov/svn/public/fast/gcovr/trunk@2722 c9093e8b-e624-0410-814a-e3c5b6d4c2ac
1 parent fb61f9e, commit 4860fb2b361c32db81b428f577e89ade914a6d7b, whart222 committed Jan 6, 2012
Showing with 50 additions and 59 deletions.
  1. +1 −1 gcovr/tests/test_gcovr.py
  2. +49 −58 scripts/gcovr
gcovr/tests/test_gcovr.py
@@ -53,7 +53,7 @@ def gcovr_test_txt(self, name):
os.chdir(os.path.join(basedir,name))
run(["make"]) or self.fail("Make failed")
run(["make","txt"]) or self.fail("Execution failed")
- self.failUnlessFileEqualsBaseline("coverage.txt", "reference/coverage.txt")
+ self.assertFileEqualsBaseline("coverage.txt", "reference/coverage.txt")
run(["make","clean"]) or self.fail("Clean failed")
os.chdir(basedir)
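
The renamed assertion here is a project-specific test helper, but the rename follows the same direction as the standard library, where the failUnless* aliases are deprecated in favor of the assert* names. A minimal sketch with plain unittest (the test and values are illustrative, not from this suite):

    import unittest

    class RenamedAssertions(unittest.TestCase):
        def test_assert_names(self):
            # assertTrue / assertEqual are the preferred spellings of the
            # deprecated failUnless / failUnlessEqual aliases.
            self.assertTrue("coverage.txt".endswith(".txt"))
            self.assertEqual(1 + 1, 2)

    if __name__ == "__main__":
        unittest.main()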
scripts/gcovr
@@ -63,27 +63,27 @@ class CoverageData(object):
self.noncode = copy.copy(noncode)
# But, a deep copy is required here
self.all_lines = copy.deepcopy(uncovered)
- self.all_lines.update(covered.keys())
+ self.all_lines.update(list(covered.keys()))
self.branches = copy.deepcopy(branches)
def update(self, uncovered, covered, branches, noncode):
self.all_lines.update(uncovered)
- self.all_lines.update(covered.keys())
+ self.all_lines.update(list(covered.keys()))
self.uncovered.update(uncovered)
self.noncode.intersection_update(noncode)
- for k in covered.keys():
+ for k in list(covered.keys()):
self.covered[k] = self.covered.get(k,0) + covered[k]
- for k in branches.keys():
+ for k in list(branches.keys()):
for b in branches[k]:
d = self.branches.setdefault(k, {})
d[b] = d.get(b, 0) + branches[k][b]
- self.uncovered.difference_update(self.covered.keys())
+ self.uncovered.difference_update(list(self.covered.keys()))
def uncovered_str(self):
if options.show_branch:
# Don't do any aggregation on branch results
tmp = []
- for line in self.branches.keys():
+ for line in list(self.branches.keys()):
for branch in self.branches[line]:
if self.branches[line][branch] == 0:
tmp.append(line)
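
A minimal sketch of the dict-view issue this hunk works around (the dictionary below is illustrative): in Python 3, dict.keys() returns a view rather than a list, so code that sorts the keys or otherwise expects a real list needs an explicit list() copy, which behaves the same under Python 2.

    covered = {"foo.c": 3, "bar.c": 1}        # illustrative coverage counts
    keys = list(covered.keys())               # a real list in both 2 and 3
    keys.sort()                               # views have no sort() method
    for k in keys:
        covered[k] = covered.get(k, 0) + 1    # safe: iterating over a copy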
@@ -101,14 +101,13 @@ class CoverageData(object):
last = None
ranges=[]
for item in tmp:
- #print "HERE",item
if last is None:
first=item
last=item
elif item == (last+1):
last=item
else:
- if len(self.noncode.intersection(range(last+1,item))) \
+ if len(self.noncode.intersection(list(range(last+1,item)))) \
== item - last - 1:
last = item
continue
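
A minimal sketch of the range() change behind this hunk (the set contents are illustrative): Python 3's range() returns a lazy sequence instead of a list. set.intersection() accepts any iterable, so the list() wrapper is mainly a conservative way to keep the Python 2 and 3 code paths identical.

    noncode = {11, 12, 13}                        # illustrative line numbers
    gap = noncode.intersection(range(11, 14))     # accepted with or without list()
    assert len(gap) == 3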
@@ -129,8 +128,8 @@ class CoverageData(object):
if ( options.show_branch ):
total = 0
cover = 0
- for line in self.branches.keys():
- for branch in self.branches[line].keys():
+ for line in list(self.branches.keys()):
+ for branch in list(self.branches[line].keys()):
total += 1
cover += self.branches[line][branch] > 0 and 1 or 0
else:
@@ -173,7 +172,7 @@ def search_file(expr, path=None, abspath=False, follow_links=False):
if path is None or path == ".":
path = os.getcwd()
elif not os.path.exists(path):
- raise IOError, "Unknown directory '"+path+"'"
+ raise IOError("Unknown directory '"+path+"'")
for root, dirs, files in os.walk(path, topdown=True):
for name in files:
if pattern.match(name):
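
A minimal sketch of the raise-syntax change in this hunk (the path handling is illustrative): the comma form of raise is a syntax error in Python 3, while calling the exception class works in both versions.

    import os

    def check_dir(path):
        if not os.path.exists(path):
            # Python 2 only:  raise IOError, "Unknown directory '" + path + "'"
            raise IOError("Unknown directory '" + path + "'")   # works in 2 and 3

    check_dir(os.getcwd())      # an existing directory, so nothing is raised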
@@ -194,10 +193,10 @@ def get_datafiles(flist, options, ext="gcda"):
allfiles=[]
for dir in flist:
if options.verbose:
- print "Scanning directory "+dir+" for "+ext+" files..."
+ sys.stdout.write("Scanning directory "+dir+" for "+ext+" files...\n")
files = search_file(".*\."+ext, dir, abspath=True, follow_links=True)
if options.verbose:
- print "Found %d files " % len(files)
+ sys.stdout.write("Found %d files \n" % len(files))
allfiles += files
return allfiles
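
A minimal sketch of the print change applied throughout the rest of the script (values are illustrative): the Python 2 print statement is invalid syntax in Python 3, so the script writes to sys.stdout directly; "from __future__ import print_function" would be the other common way to keep a single code path.

    import sys

    ext = "gcda"                              # illustrative extension
    files = ["a.gcda", "b.gcda"]              # illustrative search results
    sys.stdout.write("Scanning for " + ext + " files...\n")
    sys.stdout.write("Found %d files\n" % len(files))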
@@ -216,13 +215,13 @@ def process_gcov_data(file, covdata, options):
#fname = os.path.dirname((segments[-1]).strip())+os.sep+fname
fname = os.path.abspath(fname)
if options.verbose:
- print "Parsing coverage data for file "+fname
+ sys.stdout.write("Parsing coverage data for file %s\n" % fname)
#
# Return if the filename does not match the filter
#
if options.filter is not None and not options.filter.match(fname):
if options.verbose:
- print " Filtering coverage data for file "+fname
+ sys.stdout.write(" Filtering coverage data for file %s\n" % fname)
return
#
# Return if the filename matches the exclude pattern
@@ -232,7 +231,7 @@ def process_gcov_data(file, covdata, options):
options.exclude[i].match(fname) or \
options.exclude[i].match(os.path.abspath(fname)):
if options.verbose:
- print " Excluding coverage data for file "+fname
+ sys.stdout.write(" Excluding coverage data for file %s\n" % fname)
return
#
# Parse each line, and record the lines
@@ -286,23 +285,16 @@ def process_gcov_data(file, covdata, options):
#prev = int(segments[1].strip())
#first_record=True
else:
- print "UNKNOWN LINE DATA:",tmp
+ sys.stdout.write("UNKNOWN LINE DATA: %s\n" % tmp)
#
# If the file is already in covdata, then we
# remove lines that are covered here. Otherwise,
# initialize covdata
#
- #print "HERE",fname
- #print "HERE uncovered",uncovered
- #print "HERE covered",covered
if not fname in covdata:
covdata[fname] = CoverageData(fname,uncovered,covered,branches,noncode)
else:
- #print "HERE B uncovered",covdata[fname].uncovered
- #print "HERE B covered",covdata[fname].covered
covdata[fname].update(uncovered,covered,branches,noncode)
- #print "HERE A uncovered",covdata[fname].uncovered
- #print "HERE A covered",covdata[fname].covered
INPUT.close()
#
@@ -409,7 +401,7 @@ def process_datafile(filename, covdata, options):
# cmd.extend(["--object-directory", Template(options.objdir).substitute(filename=filename, head=dirname, tail=base, root=name, ext=ext)])
if options.verbose:
- print "Running gcov: '%s' in '%s'" % ( ' '.join(cmd), os.getcwd() )
+ sys.stdout.write("Running gcov: '%s' in '%s'\n" % ( ' '.join(cmd), os.getcwd() ))
(out, err) = subprocess.Popen( cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE ).communicate()
@@ -422,7 +414,7 @@ def process_datafile(filename, covdata, options):
fname = found.group(1)
if not options.gcov_filter.match(fname):
if options.verbose:
- print "Filtering gcov file",fname
+ sys.stdout.write("Filtering gcov file %s\n" % fname)
continue
exclude=False
for i in range(0,len(options.gcov_exclude)):
@@ -434,8 +426,7 @@ def process_datafile(filename, covdata, options):
if not exclude:
gcov_files.append(fname)
elif options.verbose:
- print "Excluding gcov file",fname
- #print "Output files\n" + "\n".join(gcov_files)
+ sys.stdout.write("Excluding gcov file %s\n" % fname)
if source_re.search(err):
# gcov tossed errors: try the next potential_wd
@@ -455,9 +446,9 @@ def process_datafile(filename, covdata, options):
os.remove(abs_filename)
if not Done:
- print "GCOV produced the following errors processing %s:\n %s" \
+ sys.stdout.write("GCOV produced the following errors processing %s:\n %s" \
"(gcovr could not infer a working directory " \
- "that resolved it.)" % ( filename, " ".join(errors) )
+ "that resolved it.)\n" % ( filename, " ".join(errors) ))
#
# Produce the classic gcovr text report
@@ -482,28 +473,28 @@ def print_text_report(covdata):
total_lines=0
total_covered=0
# Header
- print >>OUTPUT, "-"*78
+ OUTPUT.write("-"*78 + '\n')
a = options.show_branch and "Branch" or "Lines"
b = options.show_branch and "Taken" or "Exec"
- print >>OUTPUT, "File".ljust(40) + a.rjust(8) + b.rjust(8)+ " Cover Missing"
- print >>OUTPUT, "-"*78
+ OUTPUT.write("File".ljust(40) + a.rjust(8) + b.rjust(8)+ " Cover Missing\n")
+ OUTPUT.write("-"*78 + '\n')
# Data
- keys = covdata.keys()
+ keys = list(covdata.keys())
keys.sort(key=options.sort_uncovered and _num_uncovered or \
options.sort_percent and _percent_uncovered or _alpha)
for key in keys:
(t, n, txt) = covdata[key].summary()
total_lines += t
total_covered += n
- print >>OUTPUT, txt
+ OUTPUT.write(txt + '\n')
# Footer & summary
- print >>OUTPUT, "-"*78
+ OUTPUT.write("-"*78 + '\n')
percent = total_lines and str(int(100.0*total_covered/total_lines)) or "--"
- print >>OUTPUT, "TOTAL".ljust(40) + str(total_lines).rjust(8) + \
- str(total_covered).rjust(8) + str(percent).rjust(6)+"%"
- print >>OUTPUT, "-"*78
+ OUTPUT.write("TOTAL".ljust(40) + str(total_lines).rjust(8) + \
+ str(total_covered).rjust(8) + str(percent).rjust(6)+"%" + '\n')
+ OUTPUT.write("-"*78 + '\n')
# Close logfile
if options.output:
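
A minimal sketch of the redirected-print change in this hunk (sys.stdout stands in for the report file handle): the "print >> fileobj" form does not exist in Python 3, so writing to the file object directly, with an explicit newline, is the portable spelling.

    import sys

    OUTPUT = sys.stdout                       # any file-like object works
    OUTPUT.write("-" * 78 + "\n")             # Python 2 form: print >>OUTPUT, "-"*78
    OUTPUT.write("TOTAL".ljust(40) + "100".rjust(8) + "\n")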
@@ -519,13 +510,13 @@ def print_xml_report(covdata):
lineCovered = 0
options.show_branch = True
- for key in covdata.keys():
+ for key in list(covdata.keys()):
(total, covered, percent) = covdata[key].coverage()
branchTotal += total
branchCovered += covered
options.show_branch = False
- for key in covdata.keys():
+ for key in list(covdata.keys()):
(total, covered, percent) = covdata[key].coverage()
lineTotal += total
lineCovered += covered
@@ -554,7 +545,7 @@ def print_xml_report(covdata):
packages = {}
source_dirs = set()
- keys = covdata.keys()
+ keys = list(covdata.keys())
keys.sort()
for f in keys:
data = covdata[f]
@@ -596,7 +587,7 @@ def print_xml_report(covdata):
l.setAttribute("branch", "false")
else:
b_hits = 0
- for v in branches.values():
+ for v in list(branches.values()):
if v > 0:
b_hits += 1
coverage = 100*b_hits/len(branches)
@@ -630,12 +621,12 @@ def print_xml_report(covdata):
package[4] += class_branch_hits
package[5] += class_branches
- for packageName, packageData in packages.items():
+ for packageName, packageData in list(packages.items()):
package = packageData[0];
packageXml.appendChild(package)
classes = doc.createElement("classes")
package.appendChild(classes)
- classNames = packageData[1].keys()
+ classNames = list(packageData[1].keys())
classNames.sort()
for className in classNames:
classes.appendChild(packageData[1][className])
@@ -660,8 +651,8 @@ def print_xml_report(covdata):
reldir = d[len(cwd):].lstrip(os.path.sep)
elif cwd.startswith(d):
i = 1
- print d
- print os.path.join(*tuple([cwd]+['..']*i))
+ sys.stdout.write(d+'\n')
+ sys.stdout.write(os.path.join(*tuple([cwd]+['..']*i)) + '\n')
while normpath(d) != \
normpath(os.path.join(*tuple([cwd]+['..']*i))):
i += 1
@@ -678,10 +669,10 @@ def print_xml_report(covdata):
xmlString = doc.toprettyxml()
#xml.dom.ext.PrettyPrint(doc)
if options.output is None:
- print xmlString
+ sys.stdout.write(xmlString+'\n')
else:
OUTPUT = open(options.output, 'w')
- print >>OUTPUT, xmlString
+ OUTPUT.write(xmlString +'\n')
OUTPUT.close()
@@ -775,17 +766,17 @@ parser.description="A utility to run gcov and generate a simple report that summ
#
(options, args) = parser.parse_args(args=sys.argv)
if options.version:
- print "gcovr "+__version__
- print ""
- print "Copyright (2008) Sandia Corporation. Under the terms of Contract "
- print "DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government "
- print "retains certain rights in this software."
+ sys.stdout.write("gcovr "+__version__+'\n')
+ sys.stdout.write("\n")
+ sys.stdout.write("Copyright (2008) Sandia Corporation. Under the terms of Contract \n")
+ sys.stdout.write("DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government \n")
+ sys.stdout.write("retains certain rights in this software.\n")
sys.exit(0)
if options.objdir:
if normpath(options.objdir) != options.objdir.replace('/',os.sep):
- print "WARNING: relative referencing in --object-directory; this could"
- print " cause strange errors when gcovr attempts to identify"
- print " the original gcc working directory."
+ sys.stdout.write("WARNING: relative referencing in --object-directory; this could\n")
+ sys.stdout.write(" cause strange errors when gcovr attempts to identify\n")
+ sys.stdout.write(" the original gcc working directory.\n")
#
# Setup filters
#
@@ -818,7 +809,7 @@ covdata = {}
for file in datafiles:
process_datafile(file,covdata,options)
if options.verbose:
- print "Gathered coveraged data for "+str(len(covdata))+" files"
+ sys.stdout.write("Gathered coveraged data for "+str(len(covdata))+" files\n")
#
# Print report
#
