Skip to content

Commit

Permalink
pattern info serializable
Browse files Browse the repository at this point in the history
  • Loading branch information
wntgd committed Feb 22, 2020
1 parent ca9c297 commit 0c1ef83
Show file tree
Hide file tree
Showing 7 changed files with 133 additions and 114 deletions.
6 changes: 6 additions & 0 deletions graph_analyzer/json_serializer.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
from json import JSONEncoder

from pattern_detection import DetectionResult
from pattern_detectors.PatternInfo import PatternInfo
from pattern_detectors.pipeline_detector import PipelineStage
from variable import Variable


Expand Down Expand Up @@ -32,6 +34,10 @@ def default(self, o):
return o.name
if isinstance(o, PatternInfo):
return filter_members(o.__dict__)
if isinstance(o, DetectionResult):
return filter_members(o.__dict__)
if isinstance(o, PipelineStage):
return filter_members(o.__dict__)

# Let the base class default method raise the TypeError
return JSONEncoder.default(self, o)
32 changes: 21 additions & 11 deletions graph_analyzer/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,22 +6,24 @@
# a BSD-style license. See the LICENSE file in the package base
# directory for details.

"""Discopop analyzer.
"""Discopop analyzer
Usage:
main.py [--path <path>] [--cu-xml <cuxml>] [--dep-file <depfile>] [--plugins <plugs>] \
[--loop-counter <loopcount>] [--reduction <reduction>]
[--loop-counter <loopcount>] [--reduction <reduction>] [--json <json_out>]
Options:
--path=<path> Directory with input data [default: ./]
--cu-xml=<cuxml> CU node xml file [default: Data.xml].
--dep-file=<depfile> Dependencies text file [default: dep.txt].
--loop-counter=<loopcount> Loop counter data [default: loop_counter_output.txt].
--reduction=<reduction> Reduction variables file [default: reduction.txt].
--cu-xml=<cuxml> CU node xml file [default: Data.xml]
--dep-file=<depfile> Dependencies text file [default: dep.txt]
--loop-counter=<loopcount> Loop counter data [default: loop_counter_output.txt]
--reduction=<reduction> Reduction variables file [default: reduction.txt]
--json=<json_out> Json output
--plugins=<plugs> Plugins to execute
-h --help Show this screen.
--version Show version.
-h --help Show this screen
--version Show version
"""
import json
import os
import time

Expand All @@ -30,16 +32,18 @@
from schema import Schema, Use, SchemaError

from PETGraph import PETGraph
from json_serializer import PatternInfoSerializer
from parser import parse_inputs
from pattern_detection import PatternDetector
from pattern_detection import PatternDetector, DetectionResult

docopt_schema = Schema({
'--path': Use(str),
'--cu-xml': Use(str),
'--dep-file': Use(str),
'--loop-counter': Use(str),
'--reduction': Use(str),
'--plugins': Use(str)
'--plugins': Use(str),
'--json': Use(str)
})


Expand Down Expand Up @@ -96,7 +100,13 @@ def get_path(base_path: str, file: str) -> str:
graph = p.run(graph)

pattern_detector = PatternDetector(graph)
pattern_detector.detect_patterns()
res: DetectionResult = pattern_detector.detect_patterns()

if arguments['--json'] == 'None':
print(str(res))
else:
with open(arguments['--json'], 'w') as f:
json.dump(res, f, indent=2, cls=PatternInfoSerializer)

end = time.time()

Expand Down
47 changes: 30 additions & 17 deletions graph_analyzer/pattern_detection.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,16 +5,31 @@
# This software may be modified and distributed under the terms of
# a BSD-style license. See the LICENSE file in the package base
# directory for details.
import itertools
import json
from typing import List

import utils
from PETGraph import PETGraph
from json_serializer import PatternInfoSerializer
from pattern_detectors.do_all_detector import run_detection as detect_do_all
from pattern_detectors.geometric_decomposition_detector import run_detection as detect_gd
from pattern_detectors.pipeline_detector import run_detection as detect_pipeline
from pattern_detectors.reduction_detector import run_detection as detect_reduction
from pattern_detectors.task_parallelism_detector import run_detection as detect_tp
from pattern_detectors.do_all_detector import run_detection as detect_do_all, DoAllInfo
from pattern_detectors.geometric_decomposition_detector import run_detection as detect_gd, GDInfo
from pattern_detectors.pipeline_detector import run_detection as detect_pipeline, PipelineInfo
from pattern_detectors.reduction_detector import run_detection as detect_reduction, ReductionInfo
from pattern_detectors.task_parallelism_detector import run_detection as detect_tp, TaskParallelismInfo


class DetectionResult(object):
    """Container for the pattern lists produced by one analyzer run.

    Attributes are assigned externally by the detection passes; a detector
    whose prerequisites are missing may leave its attribute unset.
    """
    reduction: List[ReductionInfo]
    do_all: List[DoAllInfo]
    pipeline: List[PipelineInfo]
    geometric_decomposition: List[GDInfo]
    task_parallelism: List[TaskParallelismInfo]

    def __init__(self):
        # Attributes are filled in lazily by the individual detectors.
        pass

    def __str__(self):
        # One section per detector, separated by three newlines; within a
        # section the individual pattern infos are separated by blank lines.
        sections = []
        for patterns in self.__dict__.values():
            if patterns is None:
                continue
            sections.append('\n\n'.join(str(entry) for entry in patterns))
        return '\n\n\n'.join(sections)


class PatternDetector(object):
Expand Down Expand Up @@ -57,33 +72,31 @@ def detect_patterns(self):
"""
self.__merge(False, True)

res = DetectionResult()

print('===DETECTING REDUCTION===')
# reduction before doall!
if self.pet.reduction_vars is not None:
for reduction in detect_reduction(self.pet):
print(json.dumps(reduction, indent=2,
cls=PatternInfoSerializer))
res.reduction = detect_reduction(self.pet)
else:
print('reduction variables are required for this detector\n')

print('===DETECTING DO ALL===')
for do_all in detect_do_all(self.pet):
print(do_all, '\n')
res.do_all = detect_do_all(self.pet)

print('===DETECTING PIPELINE===')
for pipeline in detect_pipeline(self.pet):
print(pipeline, '\n')
res.pipeline = detect_pipeline(self.pet)

print('===DETECTING GEOMETRIC DECOMPOSITION===')
if self.pet.loop_data is not None:
for gd in detect_gd(self.pet):
print(gd, '\n')
res.geometric_decomposition = detect_gd(self.pet)
else:
print('loop iteration data is required for this detector\n')

print('===DETECTING TASK PARALLELISM===')
if self.pet.loop_data is not None:
for tp in detect_tp(self.pet):
print(tp, '\n')
res.task_parallelism = detect_tp(self.pet)
else:
print('loop iteration data is required for this detector\n')

return res
4 changes: 1 addition & 3 deletions graph_analyzer/pattern_detectors/do_all_detector.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,6 @@
# This software may be modified and distributed under the terms of
# a BSD-style license. See the LICENSE file in the package base
# directory for details.


from typing import List

import numpy as np
Expand All @@ -33,7 +31,7 @@ def __init__(self, pet: PETGraph, node: Vertex, coefficient: float):
:param coefficient: correlation coefficient
"""
PatternInfo.__init__(self, pet, node)
self.coefficient = coefficient
self.coefficient = round(coefficient, 3)
fp, p, lp, s, r = classify_loop_variables(pet, node)
self.first_private = fp
self.private = p
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,32 +16,31 @@
import PETGraph
from pattern_detectors.PatternInfo import PatternInfo
from pattern_detectors.do_all_detector import do_all_threshold
from utils import find_subnodes, get_subtree_of_type, get_loop_iterations, classify_loop_variables, \
calculate_workload, classify_task_vars, get_child_loops
from utils import find_subnodes, get_subtree_of_type, get_loop_iterations, classify_task_vars, get_child_loops
# cache
from variable import Variable

__loop_iterations: Dict[str, int] = {}


class GdSubLoopInfo(PatternInfo):
    """Class that contains the do-all detection result
class GDInfo(PatternInfo):
    """Class that contains the geometric decomposition detection result
"""
coefficient: float

def __init__(self, pet: PETGraph, base: Vertex, use_tasks: bool, min_iter):
def __init__(self, pet: PETGraph, node: Vertex, min_iter: int):
"""
:param pet: PET graph
:param node: node, where do-all was detected
:param node: node, where geometric decomposition was detected
"""
PatternInfo.__init__(self, pet, base)
self.pet = pet
self.base = base
PatternInfo.__init__(self, pet, node)

do_all, reduction = get_child_loops(pet, node)
self.do_all_children = [pet.graph.vp.id[v] for v in do_all]
self.reduction_children = [pet.graph.vp.id[v] for v in reduction]

self.min_iter_number = min_iter
mi_sqrt = math.sqrt(min_iter)
wl = math.sqrt(calculate_workload(pet, base))
wl = math.sqrt(self.workload)
nt = 1.1 * mi_sqrt + 0.0002 * wl - 0.0000002 * mi_sqrt * wl - 10

if nt >= 1000:
Expand All @@ -53,27 +52,25 @@ def __init__(self, pet: PETGraph, base: Vertex, use_tasks: bool, min_iter):
else:
self.num_tasks = math.floor(nt)

if use_tasks:
self.pragma = "for (i = 0; i < num-tasks; i++) #pragma omp task"
lp = []
fp, p, s, in_dep, out_dep, in_out_dep, r = \
classify_task_vars(self.pet, base, "GeometricDecomposition", [], [])
fp.append(Variable('int', 'i'))
else:
# TODO classify task loop vars
self.pragma = "#pragma omp taskloop num_tasks(num-tasks) for (i = 0; i < num-tasks; i++)"
fp, p, lp, s, r = classify_loop_variables(pet, base)
self.pragma = "for (i = 0; i < num-tasks; i++) #pragma omp task"
lp = []
fp, p, s, in_dep, out_dep, in_out_dep, r = \
classify_task_vars(pet, node, "GeometricDecomposition", [], [])
fp.append(Variable('int', 'i'))

self.first_private = fp
self.private = p
self.last_private = lp
self.shared = s
self.reduction = r

def __str__(self):
return f'\tNode: {self.node_id}\n' \
f'\tStart line: {self.start_line}\n' \
f'\tEnd line: {self.end_line}\n' \
f'\tType: Geometric Decomposition Pattern\n' \
return f'Geometric decomposition at: {self.node_id}\n' \
f'Start line: {self.start_line}\n' \
f'End line: {self.end_line}\n' \
f'Type: Geometric Decomposition Pattern\n' \
f'Do-All loops: {[n for n in self.do_all_children]}\n' \
f'Reduction loops: {[n for n in self.reduction_children]}\n' \
f'\tNumber of tasks: {self.num_tasks}\n' \
f'\tChunk limits: {self.min_iter_number}\n' \
f'\tpragma: {self.pragma}]\n' \
Expand All @@ -84,35 +81,6 @@ def __str__(self):
f'\tlast private: {[v.name for v in self.last_private]}'


class GDInfo(PatternInfo):
"""Class, that contains geometric decomposition detection result
"""

def __init__(self, pet: PETGraph, node: Vertex, min_iter: int):
"""
:param pet: PET graph
:param node: node, where geometric decomposition was detected
"""
PatternInfo.__init__(self, pet, node)
self.pet = pet

self.do_all_children, self.reduction_children = get_child_loops(pet, node)

self.sub_loop_info = [GdSubLoopInfo(pet, node, True, min_iter)]

def __str__(self):
s = f'Geometric decomposition at: {self.node_id}\n' \
f'Start line: {self.start_line}\n' \
f'End line: {self.end_line}\n' \
f'Type: Geometric Decomposition Pattern\n' \
f'Do-All loops: {[self.pet.graph.vp.id[n] for n in self.do_all_children]}\n' \
f'Reduction loops: {[self.pet.graph.vp.id[n] for n in self.reduction_children]}\n\n'
for t in self.sub_loop_info:
s += str(t) + '\n\n'

return s


def run_detection(pet: PETGraph) -> List[GDInfo]:
"""Detects geometric decomposition
Expand Down
Loading

0 comments on commit 0c1ef83

Please sign in to comment.