This repository has been archived by the owner on May 23, 2022. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 0
/
HSDAG.java
173 lines (142 loc) · 5.94 KB
/
HSDAG.java
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
/*
*
* * Consistency-based Algorithms for Conflict Detection and Resolution
* *
* * Copyright (c) 2022
* *
* * @author: Viet-Man Le (vietman.le@ist.tugraz.at)
*
*/
package at.tugraz.ist.ase.cacdr.algorithms.hsdag;
import at.tugraz.ist.ase.cacdr.algorithms.hsdag.labeler.IHSLabelable;
import at.tugraz.ist.ase.cacdr.algorithms.hsdag.parameters.AbstractHSParameters;
import at.tugraz.ist.ase.cacdr.checker.ChocoConsistencyChecker;
import at.tugraz.ist.ase.common.LoggerUtils;
import at.tugraz.ist.ase.knowledgebases.core.Constraint;
import com.google.common.collect.Sets;
import lombok.extern.slf4j.Slf4j;
import java.util.*;
import static at.tugraz.ist.ase.eval.evaluator.PerformanceEvaluator.*;
/**
* Implementation of the HS-dag algorithm.
* IHSLabeler algorithms could return labels (conflict or diagnosis) which are not minimal.
*
* source: https://github.com/jaccovs/Master-project
* @author Viet-Man Le (vietman.le@ist.tugraz.at)
*/
@Slf4j
@Slf4j
public class HSDAG extends HSTree {

    // Map of <pathLabels, Node>.
    // Lets expand() reuse an already-constructed node whose path labels match
    // (rule 1.a of the HS-dag construction) instead of building a duplicate.
    private final Map<Set<Constraint>, Node> nodesLookup = new HashMap<>();

    public HSDAG(IHSLabelable labeler, ChocoConsistencyChecker checker) {
        super(labeler, checker);
    }

    /**
     * Computes a label (conflict) for the given node.
     *
     * Each newly obtained conflict is compared against the already-known conflicts
     * for subset relations. Whenever one conflict contains another, the superset is
     * recorded as non-minimal: every DAG node currently labeled with the superset is
     * relabeled with the smaller conflict, and the arcs labeled with constraints
     * that occur only in the superset are detached (pruning).
     *
     * @param node the node to compute a label for
     * @return the (minimized) list of conflicts; empty if no conflict was found
     */
    @Override
    protected List<Set<Constraint>> computeLabel(Node node) {
        AbstractHSParameters param = node.getParameters();

        start(TIMER_CONFLICT);
        List<Set<Constraint>> conflicts = getLabeler().getLabel(param);

        if (!conflicts.isEmpty()) {
            stop(TIMER_CONFLICT);

            // check existing and obtained conflicts for subset-relations
            List<Set<Constraint>> nonMinConflicts = new LinkedList<>();
            for (Set<Constraint> fs : getConflicts()) {
                if (nonMinConflicts.contains(fs)) {
                    continue;
                }
                for (Set<Constraint> cs : conflicts) {
                    if (nonMinConflicts.contains(cs)) {
                        continue;
                    }
                    Set<Constraint> greater = (fs.size() > cs.size()) ? fs : cs;
                    Set<Constraint> smaller = (fs.size() > cs.size()) ? cs : fs;

                    if (greater.containsAll(smaller)) {
                        nonMinConflicts.add(greater);

                        // update the DAG: relabel every node labeled with the
                        // non-minimal conflict and detach its redundant arcs
                        List<Node> nodes = this.cs_nodesMap.get(greater);
                        if (nodes != null) {
                            for (Node nd : nodes) {
                                incrementCounter(COUNTER_PRUNING);

                                nd.setLabel(smaller); // relabel the node with smaller
                                addItemToCSNodesMap(smaller, nd); // add new label to the map

                                // arcs labeled with constraints only in the superset are redundant
                                // (Sets.difference is a live view; neither set is mutated here)
                                Set<Constraint> delete = Sets.difference(greater, smaller);
                                for (Constraint label : delete) {
                                    Node child = nd.getChildren().get(label);
                                    nd.getChildren().remove(label);
                                    if (child != null) {
                                        child.getParents().remove(nd);
                                        // BUGFIX: clean up the detached child subtree, not nd.
                                        // nd keeps its own parents, so cleanUpNodes(nd) always
                                        // returned at the parents-non-empty guard and orphaned
                                        // children were never pruned from nodesLookup.
                                        cleanUpNodes(child);
                                    }
                                }
                            }
                        }
                    }
                }
            }

            // remove the known non-minimal conflicts
            conflicts.removeAll(nonMinConflicts);
            for (Set<Constraint> cs : nonMinConflicts) {
                this.cs_nodesMap.remove(cs);
            }

            // add new conflicts to the list of conflicts
            addConflicts(conflicts);
        } else {
            // stop TIMER_CONFLICT without saving the time
            stop(TIMER_CONFLICT, false);
        }

        return conflicts;
    }

    /**
     * Removes the node from the lookup map and marks it as pruned if it has become
     * unreachable (no parents left), then recursively cleans up its descendants.
     *
     * @param node the node to (potentially) clean up
     */
    private void cleanUpNodes(Node node) {
        if (!node.getParents().isEmpty()) {
            return; // still reachable through another parent - keep it
        }

        nodesLookup.remove(node.getPathLabels());
        if (node.getStatus() == NodeStatus.Open) {
            node.setStatus(NodeStatus.Pruned);
            incrementCounter(COUNTER_CLEANED_NODES);
        }

        // downward clean up
        for (Constraint arcLabel : node.getChildren().keySet()) {
            Node child = node.getChildren().get(arcLabel);
            cleanUpNodes(child);
        }
    }

    /**
     * Expands the given node: for each constraint in its label, either reuses an
     * existing node with the same path labels (rule 1.a) or constructs a new child
     * node (rule 1.b) and enqueues it unless it can be pruned.
     *
     * @param nodeToExpand the node whose children are generated
     */
    @Override
    protected void expand(Node nodeToExpand) {
        log.trace("{}Generating the children nodes of [node={}]", LoggerUtils.tab, nodeToExpand);
        LoggerUtils.indent();

        for (Constraint arcLabel : nodeToExpand.getLabel()) {
            AbstractHSParameters param_parentNode = nodeToExpand.getParameters();
            AbstractHSParameters new_param = getLabeler().createParameter(param_parentNode, arcLabel);

            // rule 1.a - reuse node
            Node node = getReusableNode(nodeToExpand.getPathLabels(), arcLabel);
            if (node != null) {
                node.addParent(nodeToExpand);

                incrementCounter(COUNTER_REUSE_NODES);
                log.trace("{}Reusing [node={}]", LoggerUtils.tab, node);
            } else { // rule 1.b - generate a new node
                node = Node.builder()
                        .parent(nodeToExpand)
                        .parameters(new_param)
                        .arcLabel(arcLabel)
                        .build();
                this.nodesLookup.put(node.getPathLabels(), node);
                incrementCounter(COUNTER_CONSTRUCTED_NODES);

                if (!canPrune(node)) {
                    openNodes.add(node);
                }
            }
        }

        LoggerUtils.outdent();
    }

    /**
     * Looks up an already-constructed node whose path labels equal
     * {@code pathLabels} plus {@code arcLabel}.
     *
     * @return the reusable node, or {@code null} if none exists
     */
    private Node getReusableNode(Set<Constraint> pathLabels, Constraint arcLabel) {
        Set<Constraint> h = new LinkedHashSet<>(pathLabels);
        h.add(arcLabel);
        return this.nodesLookup.get(h);
    }

    /**
     * Resets the engine so the algorithm can run again from scratch,
     * additionally clearing this class's node lookup map.
     */
    @Override
    public void resetEngine() {
        super.resetEngine();
        this.nodesLookup.clear();
    }
}