Fix bug where redeploy hangs forever because the progressDeadlineSeconds (PDS) condition is ignored
KnVerey committed Apr 6, 2018
1 parent a07bfc1 commit 331c0d3
Showing 3 changed files with 23 additions and 15 deletions.
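
In brief: a Deployment's status (including its Progressing condition) can lag behind the spec that was just applied. The old timeout check keyed off the condition's lastUpdateTime relative to the deploy start, which could ignore a genuine ProgressDeadlineExceeded condition on redeploy and hang forever. The new check instead waits for status.observedGeneration to catch up with metadata.generation and then trusts the condition's reason. A minimal sketch of the new predicate, assuming an instance_data hash parsed from the Deployment's JSON; the method name is illustrative and the exists?/deploy_started? guards shown in the diff are omitted here:

    # Sketch only: mirrors the logic this commit adds to deployment.rb,
    # not the gem's full implementation.
    def failing_to_progress?(instance_data)
      current_generation  = instance_data.dig('metadata', 'generation')
      observed_generation = instance_data.dig('status', 'observedGeneration')
      progress_condition  = (instance_data.dig('status', 'conditions') || [])
                              .find { |c| c['type'] == 'Progressing' }

      return false unless progress_condition
      current_generation == observed_generation &&              # status reflects the spec we just applied
        progress_condition['status'] == 'False' &&               # controller reports progress has stopped
        progress_condition['reason'] == 'ProgressDeadlineExceeded'
    end
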
16 changes: 13 additions & 3 deletions lib/kubernetes-deploy/kubernetes_resource/deployment.rb
@@ -96,6 +96,16 @@ def validate_definition(_)

private

def current_generation
return -2 unless exists? # different default than observed
@instance_data.dig('metadata', 'generation')
end

def observed_generation
return -1 unless exists? # different default than current
@instance_data.dig('status', 'observedGeneration')
end

def desired_replicas
return -1 unless exists?
@instance_data["spec"]["replicas"].to_i
@@ -133,12 +143,12 @@ def deploy_failing_to_progress?
# Deployments were being updated prematurely with incorrect progress information
# https://github.com/kubernetes/kubernetes/issues/49637
return false unless Time.now.utc - @deploy_started_at >= progress_deadline.to_i
else
return false unless deploy_started?
end

deploy_started? &&
current_generation == observed_generation &&
progress_condition["status"] == 'False' &&
Time.parse(progress_condition["lastUpdateTime"]).to_i >= (@deploy_started_at - 5.seconds).to_i
progress_condition["reason"] == "ProgressDeadlineExceeded"
end

def find_latest_rs(mediator)
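
To make the stale-status case concrete, here is the shape of data the new methods compare. Field names come from the Kubernetes Deployment API, the values mirror the fixture and tests below, and failing_to_progress? refers to the sketch above:

    # Immediately after re-applying a Deployment, the controller may not have
    # observed the new spec yet, so the status still describes generation 1:
    stale = {
      'metadata' => { 'generation' => 2 },
      'status'   => {
        'observedGeneration' => 1,
        'conditions' => [
          { 'type' => 'Progressing', 'status' => 'False', 'reason' => 'ProgressDeadlineExceeded' },
        ],
      },
    }

    fresh = {
      'metadata' => { 'generation' => 2 },
      'status'   => stale['status'].merge('observedGeneration' => 2),
    }

    # With the generation guard, only the fresh status counts as a timeout:
    # failing_to_progress?(stale) => false, failing_to_progress?(fresh) => true
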
1 change: 1 addition & 0 deletions test/fixtures/for_unit_tests/deployment_test.yml
@@ -4,6 +4,7 @@ kind: Deployment
metadata:
name: web
uid: foobar
generation: 2
annotations:
"deployment.kubernetes.io/revision": "1"
labels:
21 changes: 9 additions & 12 deletions test/unit/kubernetes-deploy/kubernetes_resource/deployment_test.rb
@@ -267,48 +267,45 @@ def test_deploy_timed_out_based_on_progress_deadline
Timecop.freeze do
deployment_status = {
"replicas" => 3,
"observedGeneration" => 2,
"conditions" => [{
"type" => "Progressing",
"status" => 'False',
"lastUpdateTime" => Time.now.utc - 10.seconds,
"reason" => "Failed to progress"
"reason" => "ProgressDeadlineExceeded"
}]
}
deploy = build_synced_deployment(
template: build_deployment_template(status: deployment_status),
replica_sets: [build_rs_template(status: { "replica" => 1 })]
)
deploy.deploy_started_at = Time.now.utc - 3.minutes
refute deploy.deploy_timed_out?, "Deploy not started shouldn't have timed out"

deploy.deploy_started_at = Time.now.utc - 3.minutes
assert deploy.deploy_timed_out?
assert_equal "Timeout reason: Failed to progress\nLatest ReplicaSet: web-1", deploy.timeout_message.strip
assert_equal "Timeout reason: ProgressDeadlineExceeded\nLatest ReplicaSet: web-1", deploy.timeout_message.strip
end
end

def test_deploy_timed_out_based_on_progress_deadline_ignores_conditions_older_than_the_deploy
def test_deploy_timed_out_based_on_progress_deadline_ignores_statuses_for_older_generations
Timecop.freeze do
deployment_status = {
"replicas" => 3,
"observedGeneration" => 1, # current generation is 2
"conditions" => [{
"type" => "Progressing",
"status" => 'False',
"lastUpdateTime" => Time.now.utc - 10.seconds,
"reason" => "Failed to progress"
"reason" => "ProgressDeadlineExceeded"
}]
}
deploy = build_synced_deployment(
template: build_deployment_template(status: deployment_status),
replica_sets: [build_rs_template(status: { "replica" => 1 })]
)

deploy.deploy_started_at = nil # not started yet
deploy.deploy_started_at = Time.now.utc - 20.seconds
refute deploy.deploy_timed_out?

deploy.deploy_started_at = Time.now.utc - 4.seconds # 10s ago is before deploy started
refute deploy.deploy_timed_out?

deploy.deploy_started_at = Time.now.utc - 5.seconds # 10s ago is "equal" to deploy time (fudge for clock skew)
assert deploy.deploy_timed_out?
end
end

