Implement the basic constraint-based algorithm (#381)
Implement the basic constraint-based algorithm: it is greedy; for each replica it picks the node with the best score and assigns the replica to that node. It guarantees only a locally optimal result, not a globally optimal one.

The algorithm is based on a given set of constraints:

* HardConstraint: approves or denies an assignment based on its condition; no assignment may bypass any "hard constraint"
* SoftConstraint: scores the assignment with points/rewards; a higher score means a better assignment
The goal is to violate no "hard constraint" while accumulating the most points (rewards) from "soft constraints" (see the sketch below).
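A minimal, standalone sketch of that greedy loop, using simplified stand-in types (the real classes are AssignableNode, AssignableReplica, HardConstraint, and SoftConstraint in the ConstraintBasedAlgorithm diff below); it is illustrative only, not the committed implementation:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;

// Simplified stand-ins for HardConstraint / SoftConstraint; names are illustrative only.
interface Hard { boolean isValid(String node, String replica); }
interface Soft { float score(String node, String replica); }

public class GreedyAssignmentSketch {
  // For each replica: drop nodes rejected by any hard constraint, then pick the node
  // with the highest summed soft-constraint score (a local, not global, optimum).
  static Optional<String> pickNode(String replica, List<String> nodes,
      List<Hard> hardConstraints, List<Soft> softConstraints) {
    List<String> candidates = new ArrayList<>();
    for (String node : nodes) {
      if (hardConstraints.stream().allMatch(h -> h.isValid(node, replica))) {
        candidates.add(node);
      }
    }
    return candidates.stream().max(Comparator.comparing(
        node -> softConstraints.stream()
            .map(s -> s.score(node, replica))
            .reduce(0f, Float::sum)));
  }
}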
i3wangyi authored and jiajunwang committed Feb 7, 2020
1 parent eb437c9 commit cf8520a
Showing 10 changed files with 396 additions and 74 deletions.
@@ -23,21 +23,26 @@
* Exception thrown by Helix due to rebalance failures.
*/
public class HelixRebalanceException extends Exception {
enum RebalanceFailureType {
public enum Type {
INVALID_CLUSTER_STATUS,
INVALID_REBALANCER_STATUS,
FAILED_TO_CALCULATE,
UNKNOWN_FAILURE
}

private final RebalanceFailureType _type;
private final Type _type;

public HelixRebalanceException(String message, RebalanceFailureType type, Throwable cause) {
public HelixRebalanceException(String message, Type type, Throwable cause) {
super(String.format("%s. Failure Type: %s", message, type.name()), cause);
_type = type;
}

public RebalanceFailureType getFailureType() {
public HelixRebalanceException(String message, Type type) {
super(String.format("%s. Failure Type: %s", message, type.name()));
_type = type;
}

public Type getFailureType() {
return _type;
}
}
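For context, a hypothetical usage sketch of the new two-argument constructor and the public Type enum; the throw mirrors how ConstraintBasedAlgorithm (added below) reports calculation failures, while the example class and its main method are illustrative only:

import org.apache.helix.HelixRebalanceException;

// Hypothetical example: raise a rebalance failure with a typed reason and let the
// caller branch on getFailureType().
public class RebalanceFailureTypeExample {
  static void failCalculation(String reason) throws HelixRebalanceException {
    throw new HelixRebalanceException("Unable to compute assignment: " + reason,
        HelixRebalanceException.Type.FAILED_TO_CALCULATE);
  }

  public static void main(String[] args) {
    try {
      failCalculation("no candidate node passed the hard constraints");
    } catch (HelixRebalanceException e) {
      // The formatted exception message already includes the failure type name.
      System.out.println("Failure type: " + e.getFailureType());
    }
  }
}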
@@ -1,4 +1,4 @@
package org.apache.helix.controller.rebalancer.waged.constraints;
package org.apache.helix.controller.rebalancer.waged;

/*
* Licensed to the Apache Software Foundation (ASF) under one
@@ -9,7 +9,7 @@
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
@@ -19,26 +19,25 @@
* under the License.
*/

import org.apache.helix.HelixRebalanceException;
import org.apache.helix.controller.rebalancer.waged.model.ClusterModel;
import org.apache.helix.model.ResourceAssignment;

import java.util.Map;
import org.apache.helix.controller.rebalancer.waged.model.OptimalAssignment;

/**
* A generic rebalance algorithm interface for the WAGED rebalancer.
* A generic interface to generate the optimal assignment given the runtime cluster environment.
*
* @see <a href="Rebalance Algorithm">https://github.com/apache/helix/wiki/Design-Proposal---Weight-Aware-Globally-Even-Distribute-Rebalancer#rebalance-algorithm-adapter</a>
* <pre>
* @see <a href="https://github.com/apache/helix/wiki/
* Design-Proposal---Weight-Aware-Globally-Even-Distribute-Rebalancer
* #rebalance-algorithm-adapter">Rebalance Algorithm</a>
* </pre>
*/
public interface RebalanceAlgorithm {

/**
* Rebalance the Helix resource partitions based on the input cluster model.
*
* @param clusterModel
* @param failureReasons Return the failures <ResourceName, <FailureReason, Count>> that happen during the rebalance calculation.
* If the map is null, no failure will be returned.
* @return A map of <ResourceName, ResourceAssignment>.
* @param clusterModel The run time cluster model that contains all necessary information
* @return An instance of {@link OptimalAssignment}
*/
Map<String, ResourceAssignment> rebalance(ClusterModel clusterModel,
Map<String, Map<HardConstraint, Integer>> failureReasons);
OptimalAssignment calculate(ClusterModel clusterModel) throws HelixRebalanceException;
}
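A hypothetical caller sketch: obtain the algorithm from ConstraintBasedAlgorithmFactory (as WagedRebalancer does below) and ask it to calculate the optimal assignment for an already prepared ClusterModel; the wrapper class here is illustrative only:

import org.apache.helix.HelixRebalanceException;
import org.apache.helix.controller.rebalancer.waged.RebalanceAlgorithm;
import org.apache.helix.controller.rebalancer.waged.constraints.ConstraintBasedAlgorithmFactory;
import org.apache.helix.controller.rebalancer.waged.model.ClusterModel;
import org.apache.helix.controller.rebalancer.waged.model.OptimalAssignment;

// Hypothetical caller; constructing a ClusterModel is out of scope for this sketch.
public class RebalanceAlgorithmUsage {
  static OptimalAssignment rebalance(ClusterModel clusterModel) throws HelixRebalanceException {
    RebalanceAlgorithm algorithm = ConstraintBasedAlgorithmFactory.getInstance();
    return algorithm.calculate(clusterModel);
  }
}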
@@ -19,23 +19,22 @@
* under the License.
*/

import java.util.HashMap;
import java.util.Map;

import org.apache.helix.HelixManager;
import org.apache.helix.HelixRebalanceException;
import org.apache.helix.controller.changedetector.ResourceChangeDetector;
import org.apache.helix.controller.dataproviders.ResourceControllerDataProvider;
import org.apache.helix.controller.rebalancer.DelayedAutoRebalancer;
import org.apache.helix.controller.rebalancer.internal.MappingCalculator;
import org.apache.helix.controller.rebalancer.waged.constraints.ConstraintsRebalanceAlgorithm;
import org.apache.helix.controller.rebalancer.waged.constraints.RebalanceAlgorithm;
import org.apache.helix.controller.rebalancer.waged.constraints.ConstraintBasedAlgorithmFactory;
import org.apache.helix.controller.stages.CurrentStateOutput;
import org.apache.helix.model.IdealState;
import org.apache.helix.model.Resource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;

/**
* A placeholder before we have the implementation.
* Weight-Aware Globally-Even Distribute Rebalancer.
@@ -69,8 +68,8 @@ private ResourceChangeDetector getChangeDetector() {
public WagedRebalancer(HelixManager helixManager) {
// TODO init the metadata store according to their requirement when integrate, or change to final static method if possible.
_assignmentMetadataStore = new AssignmentMetadataStore();
// TODO init the algorithm according to the requirement when integrate.
_rebalanceAlgorithm = new ConstraintsRebalanceAlgorithm();
// TODO parse the cluster setting
_rebalanceAlgorithm = ConstraintBasedAlgorithmFactory.getInstance();

// Use the mapping calculator in DelayedAutoRebalancer for calculating the final assignment
// output.
@@ -0,0 +1,129 @@
package org.apache.helix.controller.rebalancer.waged.constraints;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

import org.apache.helix.HelixRebalanceException;
import org.apache.helix.controller.rebalancer.waged.RebalanceAlgorithm;
import org.apache.helix.controller.rebalancer.waged.model.AssignableNode;
import org.apache.helix.controller.rebalancer.waged.model.AssignableReplica;
import org.apache.helix.controller.rebalancer.waged.model.ClusterContext;
import org.apache.helix.controller.rebalancer.waged.model.ClusterModel;
import org.apache.helix.controller.rebalancer.waged.model.OptimalAssignment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.Maps;

/**
* The algorithm is based on a given set of constraints:
* - HardConstraint: approves or denies an assignment based on its condition; no assignment may
* bypass any "hard constraint"
* - SoftConstraint: scores the assignment with points/rewards; a higher score means a better
* assignment
* The goal is to accumulate the most points (rewards) from "soft constraints" while violating no
* "hard constraint"
*/
class ConstraintBasedAlgorithm implements RebalanceAlgorithm {
private static final Logger LOG = LoggerFactory.getLogger(ConstraintBasedAlgorithm.class);
private final List<HardConstraint> _hardConstraints;
private final List<SoftConstraint> _softConstraints;
private final SoftConstraintWeightModel _softConstraintsWeightModel;

ConstraintBasedAlgorithm(List<HardConstraint> hardConstraints,
List<SoftConstraint> softConstraints, SoftConstraintWeightModel softConstraintWeightModel) {
_hardConstraints = hardConstraints;
_softConstraints = softConstraints;
_softConstraintsWeightModel = softConstraintWeightModel;
}

@Override
public OptimalAssignment calculate(ClusterModel clusterModel) throws HelixRebalanceException {
OptimalAssignment optimalAssignment = new OptimalAssignment();
Map<String, Set<AssignableReplica>> replicasByResource = clusterModel.getAssignableReplicaMap();
List<AssignableNode> nodes = new ArrayList<>(clusterModel.getAssignableNodes().values());

// TODO: different orders of resource/replica could lead to different greedy assignments, will
// revisit and improve the performance
for (String resource : replicasByResource.keySet()) {
for (AssignableReplica replica : replicasByResource.get(resource)) {
Optional<AssignableNode> maybeBestNode =
getNodeWithHighestPoints(replica, nodes, clusterModel.getContext(), optimalAssignment);
// stop immediately if any replica cannot find best assignable node
if (optimalAssignment.hasAnyFailure()) {
String errorMessage = String.format(
"Unable to find any available candidate node for partition %s; Fail reasons: %s",
replica.getPartitionName(), optimalAssignment.getFailures());
throw new HelixRebalanceException(errorMessage,
HelixRebalanceException.Type.FAILED_TO_CALCULATE);
}
maybeBestNode.ifPresent(node -> clusterModel.assign(replica.getResourceName(),
replica.getPartitionName(), replica.getReplicaState(), node.getInstanceName()));
}
}

return optimalAssignment.convertFrom(clusterModel);
}

private Optional<AssignableNode> getNodeWithHighestPoints(AssignableReplica replica,
List<AssignableNode> assignableNodes, ClusterContext clusterContext,
OptimalAssignment optimalAssignment) {
Map<AssignableNode, List<HardConstraint>> hardConstraintFailures = new HashMap<>();
List<AssignableNode> candidateNodes = assignableNodes.stream().filter(candidateNode -> {
boolean isValid = true;
// Record all the failure reasons; this gives us the ability to debug/fix the runtime
// cluster environment
for (HardConstraint hardConstraint : _hardConstraints) {
if (!hardConstraint.isAssignmentValid(candidateNode, replica, clusterContext)) {
hardConstraintFailures.computeIfAbsent(candidateNode, node -> new ArrayList<>())
.add(hardConstraint);
isValid = false;
}
}
return isValid;
}).collect(Collectors.toList());
if (candidateNodes.isEmpty()) {
optimalAssignment.recordAssignmentFailure(replica,
Maps.transformValues(hardConstraintFailures, this::convertFailureReasons));
return Optional.empty();
}

Function<AssignableNode, Float> calculatePoints =
(candidateNode) -> _softConstraintsWeightModel.getSumOfScores(_softConstraints.stream()
.collect(Collectors.toMap(Function.identity(), softConstraint -> softConstraint
.getAssignmentOriginScore(candidateNode, replica, clusterContext))));

return candidateNodes.stream().max(Comparator.comparing(calculatePoints));
}

private List<String> convertFailureReasons(List<HardConstraint> hardConstraints) {
return hardConstraints.stream().map(HardConstraint::getDescription)
.collect(Collectors.toList());
}
}
@@ -0,0 +1,41 @@
package org.apache.helix.controller.rebalancer.waged.constraints;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import java.util.ArrayList;
import java.util.List;

import org.apache.helix.controller.rebalancer.waged.RebalanceAlgorithm;
import org.apache.helix.model.ClusterConfig;

public class ConstraintBasedAlgorithmFactory {

// TODO: the parameter comes from cluster config, will tune how these 2 integers will change the
// soft constraint weight model
public static RebalanceAlgorithm getInstance() {
// TODO initialize constraints, depending on constraints implementations PRs
List<HardConstraint> hardConstraints = new ArrayList<>();
List<SoftConstraint> softConstraints = new ArrayList<>();
SoftConstraintWeightModel softConstraintWeightModel = new SoftConstraintWeightModel();

return new ConstraintBasedAlgorithm(hardConstraints, softConstraints,
softConstraintWeightModel);
}
}

This file was deleted.

@@ -29,8 +29,12 @@
class SoftConstraintWeightModel {
private static Map<? extends SoftConstraint, Float> MODEL;

// TODO either define the weights in property files or zookeeper node or static human input
SoftConstraintWeightModel() {

}

static {
// TODO either define the weights in property files or zookeeper node or static human input
MODEL = ImmutableMap.<SoftConstraint, Float> builder()
.put(LeastPartitionCountConstraint.INSTANCE, 1.0f).build();
}
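The getSumOfScores method that ConstraintBasedAlgorithm calls above is not part of this diff; the following is a standalone, hedged sketch of the weighted-sum idea, with plain strings standing in for SoftConstraint and an explicit weight map standing in for MODEL:

import java.util.Map;

// Standalone sketch only; the real SoftConstraintWeightModel.getSumOfScores may differ.
public class WeightedScoreSketch {
  // Multiply each constraint's origin score by its configured weight and sum the results.
  static float getSumOfScores(Map<String, Float> weights, Map<String, Float> originScores) {
    float sum = 0f;
    for (Map.Entry<String, Float> entry : originScores.entrySet()) {
      // A constraint without a configured weight contributes nothing in this sketch.
      sum += weights.getOrDefault(entry.getKey(), 0f) * entry.getValue();
    }
    return sum;
  }
}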