Skip to content

Commit

Permalink
Implement Basic Constraint Based algorithm
Browse files Browse the repository at this point in the history
  • Loading branch information
i3wangyi committed Sep 4, 2019
1 parent fd18212 commit 8947696
Show file tree
Hide file tree
Showing 11 changed files with 398 additions and 72 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -19,23 +19,23 @@
* under the License.
*/

import java.util.HashMap;
import java.util.Map;

import org.apache.helix.HelixManager;
import org.apache.helix.HelixRebalanceException;
import org.apache.helix.controller.changedetector.ResourceChangeDetector;
import org.apache.helix.controller.dataproviders.ResourceControllerDataProvider;
import org.apache.helix.controller.rebalancer.DelayedAutoRebalancer;
import org.apache.helix.controller.rebalancer.internal.MappingCalculator;
import org.apache.helix.controller.rebalancer.waged.constraints.ConstraintsRebalanceAlgorithm;
import org.apache.helix.controller.rebalancer.waged.constraints.RebalanceAlgorithm;
import org.apache.helix.controller.rebalancer.waged.algorithm.RebalanceAlgorithm;
import org.apache.helix.controller.rebalancer.waged.algorithm.RebalanceAlgorithmFactory;
import org.apache.helix.controller.stages.CurrentStateOutput;
import org.apache.helix.model.IdealState;
import org.apache.helix.model.Resource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;

/**
* A placeholder before we have the implementation.
* Weight-Aware Globally-Even Distribute Rebalancer.
Expand Down Expand Up @@ -70,7 +70,7 @@ public WagedRebalancer(HelixManager helixManager) {
// TODO init the metadata store according to their requirement when integrate, or change to final static method if possible.
_assignmentMetadataStore = new AssignmentMetadataStore();
// TODO init the algorithm according to the requirement when integrate.
_rebalanceAlgorithm = new ConstraintsRebalanceAlgorithm();
_rebalanceAlgorithm = RebalanceAlgorithmFactory.getInstance(RebalanceAlgorithm.Type.CONSTRAINTS);

// Use the mapping calculator in DelayedAutoRebalancer for calculating the final assignment
// output.
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package org.apache.helix.controller.rebalancer.waged.constraints;
package org.apache.helix.controller.rebalancer.waged.algorithm;

/*
* Licensed to the Apache Software Foundation (ASF) under one
Expand All @@ -20,25 +20,27 @@
*/

import org.apache.helix.controller.rebalancer.waged.model.ClusterModel;
import org.apache.helix.model.ResourceAssignment;

import java.util.Map;
import org.apache.helix.controller.rebalancer.waged.model.OptimalAssignment;

/**
 * A generic interface used to give the optimal assignment given the runtime cluster environment.
 *
 * <pre>
 * @see <a href="https://github.com/apache/helix/wiki/
 * Design-Proposal---Weight-Aware-Globally-Even-Distribute-Rebalancer
 * #rebalance-algorithm-adapter">Rebalance Algorithm</a>
 * </pre>
 */
public interface RebalanceAlgorithm {

  /** Supported rebalance algorithm implementations. */
  enum Type {
    CONSTRAINTS
  }

  /**
   * Rebalance the Helix resource partitions based on the input cluster model.
   *
   * @param clusterModel The runtime cluster model that contains all necessary information
   * @return An instance of {@link OptimalAssignment} describing the computed placement
   */
  OptimalAssignment calculate(ClusterModel clusterModel);
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
package org.apache.helix.controller.rebalancer.waged.algorithm;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.helix.HelixException;
import org.apache.helix.controller.rebalancer.waged.constraints.ConstraintBasedAlgorithm;
import org.apache.helix.controller.rebalancer.waged.constraints.HardConstraint;
import org.apache.helix.controller.rebalancer.waged.constraints.SoftConstraint;

public class RebalanceAlgorithmFactory {
  // Cache of singleton algorithm instances, one per algorithm type. Guarded by
  // the class monitor via the synchronized getInstance method.
  private static final Map<RebalanceAlgorithm.Type, RebalanceAlgorithm> INSTANCES = new HashMap<>();

  // Static factory class; not meant to be instantiated.
  private RebalanceAlgorithmFactory() {
  }

  /**
   * Returns the shared {@link RebalanceAlgorithm} instance for the given type,
   * creating and caching it on first use.
   *
   * @param type the algorithm type to look up
   * @return the cached (or newly created) algorithm instance
   * @throws HelixException if the algorithm type is not supported
   */
  public static synchronized RebalanceAlgorithm getInstance(RebalanceAlgorithm.Type type) {
    // Single lookup instead of containsKey + get.
    RebalanceAlgorithm algorithm = INSTANCES.get(type);
    if (algorithm != null) {
      return algorithm;
    }

    switch (type) {
      case CONSTRAINTS:
        // TODO initialize constraints, depending on constraints PRs
        List<HardConstraint> hardConstraints = new ArrayList<>();
        List<SoftConstraint> softConstraints = new ArrayList<>();
        algorithm = new ConstraintBasedAlgorithm(hardConstraints, softConstraints);
        INSTANCES.put(type, algorithm);
        return algorithm;
      default:
        throw new HelixException("Unsupported algorithm type: " + type);
    }
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,116 @@
package org.apache.helix.controller.rebalancer.waged.constraints;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

import org.apache.helix.controller.rebalancer.waged.algorithm.RebalanceAlgorithm;
import org.apache.helix.controller.rebalancer.waged.model.AssignableNode;
import org.apache.helix.controller.rebalancer.waged.model.AssignableReplica;
import org.apache.helix.controller.rebalancer.waged.model.ClusterContext;
import org.apache.helix.controller.rebalancer.waged.model.ClusterModel;
import org.apache.helix.controller.rebalancer.waged.model.OptimalAssignment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;



/**
* The algorithm is based on a given set of constraints
* - HardConstraint: Approve or deny the assignment given its condition, any assignment cannot
* bypass any "hard constraint"
* - SoftConstraint: Evaluate the assignment by points/rewards/scores, a higher point means a better
* assignment
* The goal is to accumulate the most points(rewards) from "soft constraints" while avoiding any
* "hard constraints"
*/
public class ConstraintBasedAlgorithm implements RebalanceAlgorithm {
  private static final Logger LOG = LoggerFactory.getLogger(ConstraintBasedAlgorithm.class);

  // Hard constraints: every one must approve an assignment for it to be valid.
  private final List<HardConstraint> _hardConstraints;
  // Soft constraints: contribute scores; the highest-scoring valid node wins.
  private final List<SoftConstraint> _softConstraints;

  public ConstraintBasedAlgorithm(List<HardConstraint> hardConstraints,
      List<SoftConstraint> softConstraints) {
    _hardConstraints = hardConstraints;
    _softConstraints = softConstraints;
  }

  /**
   * Greedily assigns each replica to the valid node with the highest aggregate
   * soft-constraint score, recording the assignment into the cluster model.
   * Stops immediately and returns the failure details as soon as any replica
   * has no node satisfying all hard constraints.
   *
   * @param clusterModel The runtime cluster model that contains all necessary information
   * @return An instance of {@link OptimalAssignment}; contains failure reasons when any
   *         replica could not be placed
   */
  @Override
  public OptimalAssignment calculate(ClusterModel clusterModel) {
    OptimalAssignment optimalAssignment = new OptimalAssignment();
    List<AssignableNode> nodes = new ArrayList<>(clusterModel.getAssignableNodes().values());

    // TODO: different orders of resource/replica could lead to different greedy assignments,
    // will revisit and improve the performance
    for (Map.Entry<String, Set<AssignableReplica>> replicasOfResource : clusterModel
        .getAssignableReplicaMap().entrySet()) {
      for (AssignableReplica replica : replicasOfResource.getValue()) {
        Optional<AssignableNode> maybeBestNode =
            getNodeWithHighestPoints(replica, nodes, clusterModel.getContext(), optimalAssignment);
        // stop immediately if any replica cannot find best assignable node
        if (optimalAssignment.hasAnyFailure()) {
          LOG.error("Unable to find any available candidate node for partition {}; Fail reasons: {}",
              replica.getPartitionName(), optimalAssignment.getFailures());
          return optimalAssignment;
        }
        maybeBestNode.ifPresent(node -> clusterModel.assign(replica.getResourceName(),
            replica.getPartitionName(), replica.getReplicaState(), node.getInstanceName()));
      }
    }

    return optimalAssignment.convertFrom(clusterModel);
  }

  /**
   * Filters the nodes through all hard constraints — recording every violation so the
   * runtime cluster environment can be debugged/fixed — then picks the candidate with
   * the highest total soft-constraint score.
   *
   * @return the best node, or {@link Optional#empty()} when no node passes all hard
   *         constraints (the failure is recorded on {@code optimalAssignment})
   */
  private Optional<AssignableNode> getNodeWithHighestPoints(AssignableReplica replica,
      List<AssignableNode> assignableNodes, ClusterContext clusterContext,
      OptimalAssignment optimalAssignment) {
    Map<AssignableNode, List<HardConstraint>> hardConstraintFailures = new HashMap<>();
    List<AssignableNode> candidateNodes = assignableNodes.stream().filter(candidateNode -> {
      boolean isValid = true;
      // need to record all the failure reasons and it gives us the ability to debug/fix the
      // runtime cluster environment
      for (HardConstraint hardConstraint : _hardConstraints) {
        if (!hardConstraint.isAssignmentValid(candidateNode, replica, clusterContext)) {
          hardConstraintFailures.computeIfAbsent(candidateNode, node -> new ArrayList<>())
              .add(hardConstraint);
          isValid = false;
        }
      }
      return isValid;
    }).collect(Collectors.toList());

    if (candidateNodes.isEmpty()) {
      optimalAssignment.recordAssignmentFailure(replica, hardConstraintFailures);
      return Optional.empty();
    }

    // Sum all soft-constraint scores for a candidate; mapToDouble avoids boxing each
    // float score into a Float before summation.
    Function<AssignableNode, Double> calculatePoints = candidateNode -> _softConstraints.stream()
        .mapToDouble(softConstraint -> softConstraint.getAssignmentOriginScore(candidateNode,
            replica, clusterContext))
        .sum();

    return candidateNodes.stream().max(Comparator.comparing(calculatePoints));
  }
}

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -27,21 +27,21 @@
* Evaluate a partition allocation proposal and return YES or NO based on the cluster context.
* Any proposal fails one or more hard constraints will be rejected.
*/
abstract class HardConstraint {
public abstract class HardConstraint {

/**
* Check if the replica could be assigned to the node
* @return True if the proposed assignment is valid; False otherwise
*/
abstract boolean isAssignmentValid(AssignableNode node, AssignableReplica replica,
public abstract boolean isAssignmentValid(AssignableNode node, AssignableReplica replica,
ClusterContext clusterContext);

/**
* Return class name by default as description if it's explanatory enough, child class could override
* the method and add more detailed descriptions
* @return The detailed description of hard constraint
*/
String getDescription() {
public String getDescription() {
return getClass().getName();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ private LeastPartitionCountConstraint() {
* clusterContext.
*/
@Override
float getAssignmentOriginScore(AssignableNode node, AssignableReplica replica,
public float getAssignmentOriginScore(AssignableNode node, AssignableReplica replica,
ClusterContext clusterContext) {
throw new UnsupportedOperationException("The POC implementation has a bug, will fix it as TODO");
// float doubleMaxPartitionCount = 2.0f * clusterContext.getEstimatedMaxPartitionCount();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
* Evaluate a partition allocation proposal and return a score within the normalized range.
* A higher score means the proposal is more preferred.
*/
abstract class SoftConstraint {
public abstract class SoftConstraint {
private float _maxScore = 1000f;
private float _minScore = -1000f;

Expand Down Expand Up @@ -65,7 +65,7 @@ interface ScalerFunction {
* differently.
* @return float value representing the score
*/
abstract float getAssignmentOriginScore(AssignableNode node, AssignableReplica replica,
public abstract float getAssignmentOriginScore(AssignableNode node, AssignableReplica replica,
ClusterContext clusterContext);

/**
Expand Down
Loading

0 comments on commit 8947696

Please sign in to comment.