Commit

#1510 Fix review comments
swarajsaaj committed Oct 10, 2020
1 parent ea49cbf commit 7aea765
Showing 6 changed files with 44 additions and 33 deletions.
34 changes: 19 additions & 15 deletions circuit-breaker/README.md
@@ -19,12 +19,15 @@ cannot bring the whole application down, and we can reconnect to the service as

Real world example

> Imagine a web application that has both local files/images and remote database entries to serve.
> The database might not be responding due to a variety of reasons, so if the application keeps
> trying to read from the database using multiple threads/processes, soon all of them will hang
> causing our entire web application will crash. We should be able to detect this situation and show
> the user an appropriate message so that he/she can explore other parts of the app unaffected by
> the database failure.
> Imagine a web application that has both local files/images and remote services that it uses to
> fetch data. These remote services may be healthy and responsive at times, or may become slow and
> unresponsive at some point due to a variety of reasons. If one of the remote services is slow or
> not responding successfully, our application will keep trying to fetch a response from it using
> multiple threads/processes, and soon all of them will hang (also called
> [thread starvation](https://en.wikipedia.org/wiki/Starvation_(computer_science))), causing our
> entire web application to crash. We should be able to detect this situation and show the user an
> appropriate message so that he/she can explore other parts of the app unaffected by the remote
> service failure. Meanwhile, the other services that are working normally should keep functioning
> unaffected by this failure.

In plain words

@@ -65,12 +68,10 @@ public class App {
var serverStartTime = System.nanoTime();

var delayedService = new DelayedRemoteService(serverStartTime, 5);
//Set the circuit Breaker parameters
var delayedServiceCircuitBreaker = new DefaultCircuitBreaker(delayedService, 3000, 2,
2000 * 1000 * 1000);

var quickService = new QuickRemoteService();
//Set the circuit Breaker parameters
var quickServiceCircuitBreaker = new DefaultCircuitBreaker(quickService, 3000, 2,
2000 * 1000 * 1000);

@@ -95,6 +96,7 @@ public class App {

//Wait for the delayed service to become responsive
try {
LOGGER.info("Waiting for delayed service to become responsive");
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
@@ -166,6 +168,7 @@ public class DefaultCircuitBreaker implements CircuitBreaker {
private final long retryTimePeriod;
private final RemoteService service;
long lastFailureTime;
private String lastFailureResponse;
int failureCount;
private final int failureThreshold;
private State state;
@@ -195,7 +198,7 @@ public class DefaultCircuitBreaker implements CircuitBreaker {
this.failureCount = 0;
}

//Reset everything to defaults
// Reset everything to defaults
@Override
public void recordSuccess() {
this.failureCount = 0;
@@ -204,12 +207,14 @@ public class DefaultCircuitBreaker implements CircuitBreaker {
}

@Override
public void recordFailure() {
public void recordFailure(String response) {
failureCount = failureCount + 1;
this.lastFailureTime = System.nanoTime();
// Cache the failure response for returning on open state
this.lastFailureResponse = response;
}

//Evaluate the current state based on failureThreshold, failureCount and lastFailureTime.
// Evaluate the current state based on failureThreshold, failureCount and lastFailureTime.
protected void evaluateState() {
if (failureCount >= failureThreshold) { //Then something is wrong with remote service
if ((System.nanoTime() - lastFailureTime) > retryTimePeriod) {
@@ -263,8 +268,8 @@ public class DefaultCircuitBreaker implements CircuitBreaker {
public String attemptRequest() throws RemoteServiceException {
evaluateState();
if (state == State.OPEN) {
// return cached response if no the circuit is in OPEN state
return "This is stale response from API";
// return cached response if the circuit is in OPEN state
return this.lastFailureResponse;
} else {
// Make the API request if the circuit is not OPEN
try {
@@ -276,13 +281,12 @@ public class DefaultCircuitBreaker implements CircuitBreaker {
recordSuccess();
return response;
} catch (RemoteServiceException ex) {
recordFailure();
recordFailure(ex.getMessage());
throw ex;
}
}
}
}

```
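
The four constructor arguments passed to `DefaultCircuitBreaker` above are never spelled out in the example, so here is an annotated sketch. The meanings of the last two parameters are inferred from the `failureThreshold` and `retryTimePeriod` fields shown further down in this diff; the timeout unit is an assumption:

```java
// Annotated form of the constructor call from the example above; parameter
// meanings inferred from DefaultCircuitBreaker's fields, timeout unit assumed.
var delayedServiceCircuitBreaker = new DefaultCircuitBreaker(
    delayedService,       // the RemoteService guarded by this breaker
    3000,                 // timeout for the wrapped call (assumed milliseconds)
    2,                    // failureThreshold: failures before the circuit opens
    2000 * 1000 * 1000);  // retryTimePeriod in nanoseconds (2 seconds),
                          // compared against System.nanoTime() deltas
```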

How does the above pattern prevent failures? Let's understand via this finite state machine
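
The same transitions can also be read straight from the code. Below is a minimal sketch of `evaluateState()`, assuming the `State` enum carries the three values named in the class javadoc (`CLOSED`, `OPEN`, `HALF_OPEN`); the branch bodies beyond the threshold check are reconstructed from that CLOSED->OPEN->HALF_OPEN flow rather than copied from this diff:

```java
// Sketch of the breaker's state evaluation, assuming State defines
// CLOSED, OPEN and HALF_OPEN as named in the DefaultCircuitBreaker javadoc.
protected void evaluateState() {
  if (failureCount >= failureThreshold) { // remote service looks unhealthy
    if ((System.nanoTime() - lastFailureTime) > retryTimePeriod) {
      // Retry window has elapsed: let a single probe request through.
      state = State.HALF_OPEN;
    } else {
      // Still inside the retry window: keep short-circuiting calls.
      state = State.OPEN;
    }
  } else {
    // Below the failure threshold: normal operation.
    state = State.CLOSED;
  }
}
```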
Binary file modified circuit-breaker/etc/ServiceDiagram.PNG
circuit-breaker/src/main/java/com/iluwatar/circuitbreaker/App.java
@@ -69,12 +69,10 @@ public static void main(String[] args) {
var serverStartTime = System.nanoTime();

var delayedService = new DelayedRemoteService(serverStartTime, 5);
//Set the circuit Breaker parameters
var delayedServiceCircuitBreaker = new DefaultCircuitBreaker(delayedService, 3000, 2,
2000 * 1000 * 1000);

var quickService = new QuickRemoteService();
//Set the circuit Breaker parameters
var quickServiceCircuitBreaker = new DefaultCircuitBreaker(quickService, 3000, 2,
2000 * 1000 * 1000);

@@ -99,6 +97,7 @@ public static void main(String[] args) {

//Wait for the delayed service to become responsive
try {
LOGGER.info("Waiting for delayed service to become responsive");
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
circuit-breaker/src/main/java/com/iluwatar/circuitbreaker/CircuitBreaker.java
@@ -28,18 +28,18 @@
*/
public interface CircuitBreaker {

//Success response. Reset everything to defaults
// Success response. Reset everything to defaults
void recordSuccess();

//Failure response. Handle accordingly and change state if required.
void recordFailure();
// Failure response. Handle accordingly with response and change state if required.
void recordFailure(String response);

//Get the current state of circuit breaker
// Get the current state of circuit breaker
String getState();

//Set the specific state manually.
// Set the specific state manually.
void setState(State state);

//Attempt to fetch response from the remote service.
// Attempt to fetch response from the remote service.
String attemptRequest() throws RemoteServiceException;
}
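
For context, a caller of this interface looks roughly like the following sketch. `MonitoringService` and its wiring are assumptions inferred from the test code below (`monitoringService.delayedServiceResponse()`); only the `CircuitBreaker` API itself is taken from this commit:

```java
// Hypothetical caller; the shape of MonitoringService is inferred from the
// tests in this commit, not copied from the repository.
public class MonitoringService {

  private final CircuitBreaker delayedServiceCircuitBreaker;

  public MonitoringService(CircuitBreaker delayedServiceCircuitBreaker) {
    this.delayedServiceCircuitBreaker = delayedServiceCircuitBreaker;
  }

  public String delayedServiceResponse() {
    try {
      // In OPEN state this returns the cached failure response without
      // touching the remote service; otherwise the real call is made.
      return this.delayedServiceCircuitBreaker.attemptRequest();
    } catch (RemoteServiceException e) {
      return e.getMessage();
    }
  }
}
```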
circuit-breaker/src/main/java/com/iluwatar/circuitbreaker/DefaultCircuitBreaker.java
@@ -24,9 +24,8 @@
package com.iluwatar.circuitbreaker;

/**
* The delay based Circuit breaker implementation that works in a
* CLOSED->OPEN-(retry_time_period)->HALF_OPEN->CLOSED flow with some retry time period for failed
* services and a failure threshold for service to open
* The delay based Circuit breaker implementation that works in a CLOSED->OPEN-(retry_time_period)->HALF_OPEN->CLOSED
* flow with some retry time period for failed services and a failure threshold for service to open
* circuit.
*/
public class DefaultCircuitBreaker implements CircuitBreaker {
@@ -35,6 +34,7 @@ public class DefaultCircuitBreaker implements CircuitBreaker {
private final long retryTimePeriod;
private final RemoteService service;
long lastFailureTime;
private String lastFailureResponse;
int failureCount;
private final int failureThreshold;
private State state;
@@ -64,7 +64,7 @@ public class DefaultCircuitBreaker implements CircuitBreaker {
this.failureCount = 0;
}

//Reset everything to defaults
// Reset everything to defaults
@Override
public void recordSuccess() {
this.failureCount = 0;
@@ -73,12 +73,14 @@ public void recordSuccess() {
}

@Override
public void recordFailure() {
public void recordFailure(String response) {
failureCount = failureCount + 1;
this.lastFailureTime = System.nanoTime();
// Cache the failure response for returning on open state
this.lastFailureResponse = response;
}

//Evaluate the current state based on failureThreshold, failureCount and lastFailureTime.
// Evaluate the current state based on failureThreshold, failureCount and lastFailureTime.
protected void evaluateState() {
if (failureCount >= failureThreshold) { //Then something is wrong with remote service
if ((System.nanoTime() - lastFailureTime) > retryTimePeriod) {
@@ -132,8 +134,8 @@ public void setState(State state) {
public String attemptRequest() throws RemoteServiceException {
evaluateState();
if (state == State.OPEN) {
// return cached response if no the circuit is in OPEN state
return "This is stale response from API";
// return cached response if the circuit is in OPEN state
return this.lastFailureResponse;
} else {
// Make the API request if the circuit is not OPEN
try {
@@ -145,7 +147,7 @@ public String attemptRequest() throws RemoteServiceException {
recordSuccess();
return response;
} catch (RemoteServiceException ex) {
recordFailure();
recordFailure(ex.getMessage());
throw ex;
}
}
circuit-breaker/src/test/java/com/iluwatar/circuitbreaker/AppTest.java
@@ -27,12 +27,16 @@

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* App Test showing usage of circuit breaker.
*/
public class AppTest {

private static final Logger LOGGER = LoggerFactory.getLogger(AppTest.class);

//Startup delay for delayed service (in seconds)
private static final int STARTUP_DELAY = 4;

@@ -79,7 +83,7 @@ public void testFailure_OpenStateTransition() {
//As failure threshold is "1", the circuit breaker is changed to OPEN
assertEquals("OPEN", delayedServiceCircuitBreaker.getState());
//As circuit state is OPEN, we expect a quick fallback response from circuit breaker.
assertEquals("This is stale response from API", monitoringService.delayedServiceResponse());
assertEquals("Delayed service is down", monitoringService.delayedServiceResponse());

//Meanwhile, the quick service is responding and the circuit state is CLOSED
assertEquals("Quick Service is working", monitoringService.quickServiceResponse());
@@ -96,6 +100,7 @@ public void testFailure_HalfOpenStateTransition() {

//Waiting for recovery period of 2 seconds for circuit breaker to retry service.
try {
LOGGER.info("Waiting 2s for delayed service to become responsive");
Thread.sleep(2000);
} catch (InterruptedException e) {
e.printStackTrace();
@@ -114,6 +119,7 @@ public void testRecovery_ClosedStateTransition() {

//Waiting for 4 seconds, which is enough for DelayedService to become healthy and respond successfully.
try {
LOGGER.info("Waiting 4s for delayed service to become responsive");
Thread.sleep(4000);
} catch (InterruptedException e) {
e.printStackTrace();
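The sleep durations in these tests line up with the breaker parameters used in `App`. A quick worked check, assuming the same `2000 * 1000 * 1000` nanosecond `retryTimePeriod` from the earlier example:

```java
// Worked timing check; the retry period is taken from the App example in
// this commit, not from the tests themselves.
public class RetryWindowCheck {
  public static void main(String[] args) {
    long retryTimePeriodNanos = 2000L * 1000 * 1000;               // 2e9 ns
    long retryTimePeriodMillis = retryTimePeriodNanos / 1_000_000; // 2000 ms
    // The half-open test sleeps exactly this long (2 s), so the next
    // attemptRequest() is allowed to probe the delayed service again.
    System.out.println("Retry window: " + retryTimePeriodMillis + " ms");
  }
}
```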
