Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions lib/splitclient-rb.rb
Original file line number Diff line number Diff line change
Expand Up @@ -85,13 +85,13 @@
require 'splitclient-rb/engine/models/label'
require 'splitclient-rb/engine/models/treatment'
require 'splitclient-rb/engine/auth_api_client'
require 'splitclient-rb/engine/back_off'
require 'splitclient-rb/engine/push_manager'
require 'splitclient-rb/engine/sync_manager'
require 'splitclient-rb/engine/synchronizer'
require 'splitclient-rb/utilitites'

# SSE
require 'splitclient-rb/sse/event_source/back_off'
# SSE
require 'splitclient-rb/sse/event_source/client'
require 'splitclient-rb/sse/event_source/event_parser'
require 'splitclient-rb/sse/event_source/event_types'
Expand Down
1 change: 1 addition & 0 deletions lib/splitclient-rb/engine/api/segments.rb
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ def fetch_segments_by_names(names, fetch_options = { cache_control_headers: fals

names.each do |name|
since = @segments_repository.get_change_number(name)

loop do
segment = fetch_segment_changes(name, since, fetch_options)
@segments_repository.add_to_segment(segment)
Expand Down
26 changes: 26 additions & 0 deletions lib/splitclient-rb/engine/back_off.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# frozen_string_literal: false

module SplitIoClient
  module Engine
    BACKOFF_MAX_ALLOWED = 1.8
    # Exponential back-off schedule shared by the push manager and the
    # on-demand fetchers. Each call to #interval advances the attempt counter
    # and yields base * 2**attempt seconds, capped at max_allowed.
    class BackOff
      # back_off_base - base wait in seconds for the exponential progression.
      # attempt       - starting attempt number (0 means the first #interval
      #                 call returns 0, i.e. retry immediately).
      # max_allowed   - upper bound in seconds for any returned interval.
      def initialize(back_off_base, attempt = 0, max_allowed = BACKOFF_MAX_ALLOWED)
        @back_off_base = back_off_base
        @max_allowed = max_allowed
        @attempt = attempt
      end

      # Returns the next wait time in seconds and bumps the attempt counter.
      def interval
        current = @attempt.positive? ? @back_off_base * (2**@attempt) : 0
        @attempt += 1

        # Never exceed the configured ceiling.
        [current, @max_allowed].min
      end

      # Restarts the progression from attempt zero.
      def reset
        @attempt = 0
      end
    end
  end
end
2 changes: 1 addition & 1 deletion lib/splitclient-rb/engine/push_manager.rb
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ def initialize(config, sse_handler, api_key, telemetry_runtime_producer)
@sse_handler = sse_handler
@auth_api_client = AuthApiClient.new(@config, telemetry_runtime_producer)
@api_key = api_key
@back_off = SplitIoClient::SSE::EventSource::BackOff.new(@config.auth_retry_back_off_base, 1)
@back_off = Engine::BackOff.new(@config.auth_retry_back_off_base, 1)
@telemetry_runtime_producer = telemetry_runtime_producer
end

Expand Down
64 changes: 56 additions & 8 deletions lib/splitclient-rb/engine/synchronizer.rb
Original file line number Diff line number Diff line change
Expand Up @@ -86,32 +86,80 @@ def fetch_splits(target_change_number)

if result[:success]
@segment_fetcher.fetch_segments_if_not_exists(result[:segment_names], true) unless result[:segment_names].empty?
@config.logger.debug("Refresh completed bypassing the CDN in #{attempts} attempts.")
@config.logger.debug("Refresh completed bypassing the CDN in #{attempts} attempts.") if @config.debug_enabled
else
@config.logger.debug("No changes fetched after #{attempts} attempts with CDN bypassed.")
@config.logger.debug("No changes fetched after #{attempts} attempts with CDN bypassed.") if @config.debug_enabled
end
rescue StandardError => error
@config.log_found_exception(__method__.to_s, error)
end

# Synchronizes segment `name` until its stored change number reaches
# target_change_number, or gives up after the configured retry budgets.
# Phase 1: plain fetches with cache-control headers, fixed retry delay.
# Phase 2: CDN-bypass fetches (till: target) with exponential back-off.
# Any error is logged and swallowed; callers are not interrupted.
def fetch_segment(name, target_change_number)
  # Already up to date — nothing to fetch.
  return if target_change_number <= @segments_repository.get_change_number(name).to_i

  fetch_options = { cache_control_headers: true, till: nil }
  result = attempt_segment_sync(name,
                                target_change_number,
                                fetch_options,
                                @config.on_demand_fetch_max_retries,
                                @config.on_demand_fetch_retry_delay_seconds,
                                false)

  attempts = @config.on_demand_fetch_max_retries - result[:remaining_attempts]
  if result[:success]
    @config.logger.debug("Segment #{name} refresh completed in #{attempts} attempts.") if @config.debug_enabled

    return
  end

  # Phase 1 exhausted without reaching the target: bypass the CDN.
  fetch_options = { cache_control_headers: true, till: target_change_number }
  result = attempt_segment_sync(name,
                                target_change_number,
                                fetch_options,
                                ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES,
                                nil,
                                true)

  # Fix: attempts must be counted against the back-off retry budget used in
  # this phase, not @config.on_demand_fetch_max_retries, or the log lies.
  attempts = ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - result[:remaining_attempts]
  if result[:success]
    @config.logger.debug("Segment #{name} refresh completed bypassing the CDN in #{attempts} attempts.") if @config.debug_enabled
  else
    @config.logger.debug("No changes fetched for segment #{name} after #{attempts} attempts with CDN bypassed.") if @config.debug_enabled
  end
rescue StandardError => error
  @config.log_found_exception(__method__.to_s, error)
end

private

# Fetches segment `name` in a loop until its stored change number reaches
# target_cn or the retry budget runs out.
# Returns a sync_result hash with :success and :remaining_attempts.
# When with_backoff is true, waits grow exponentially (capped); otherwise a
# fixed retry_delay_seconds pause is used between attempts.
def attempt_segment_sync(name, target_cn, fetch_options, max_retries, retry_delay_seconds, with_backoff)
  backoff = Engine::BackOff.new(ON_DEMAND_FETCH_BACKOFF_BASE_SECONDS, 0, ON_DEMAND_FETCH_BACKOFF_MAX_WAIT_SECONDS) if with_backoff
  attempts_left = max_retries

  loop do
    attempts_left -= 1

    @segment_fetcher.fetch_segment(name, fetch_options)

    if target_cn <= @segments_repository.get_change_number(name).to_i
      return sync_result(true, attempts_left)
    elsif attempts_left <= 0
      return sync_result(false, attempts_left)
    end

    sleep(with_backoff ? backoff.interval : retry_delay_seconds)
  end
end

def attempt_splits_sync(target_cn, fetch_options, max_retries, retry_delay_seconds, with_backoff)
remaining_attempts = max_retries
backoff = SSE::EventSource::BackOff.new(ON_DEMAND_FETCH_BACKOFF_BASE_SECONDS, 0, ON_DEMAND_FETCH_BACKOFF_MAX_WAIT_SECONDS) if with_backoff
backoff = Engine::BackOff.new(ON_DEMAND_FETCH_BACKOFF_BASE_SECONDS, 0, ON_DEMAND_FETCH_BACKOFF_MAX_WAIT_SECONDS) if with_backoff

loop do
remaining_attempts -= 1

segment_names = @split_fetcher.fetch_splits(fetch_options)

return split_sync_result(true, remaining_attempts, segment_names) if target_cn <= @splits_repository.get_change_number
return split_sync_result(false, remaining_attempts, segment_names) if remaining_attempts <= 0
return sync_result(true, remaining_attempts, segment_names) if target_cn <= @splits_repository.get_change_number
return sync_result(false, remaining_attempts, segment_names) if remaining_attempts <= 0

delay = with_backoff ? backoff.interval : retry_delay_seconds
sleep(delay)
Expand Down Expand Up @@ -141,7 +189,7 @@ def start_telemetry_sync_task
Telemetry::SyncTask.new(@config, @telemetry_synchronizer).call
end

def split_sync_result(success, remaining_attempts, segment_names)
# Uniform result payload shared by the attempt_*_sync helpers.
# segment_names defaults to nil for segment syncs, which report none.
def sync_result(success, remaining_attempts, segment_names = nil)
  {
    success: success,
    remaining_attempts: remaining_attempts,
    segment_names: segment_names
  }
end
end
Expand Down
28 changes: 0 additions & 28 deletions lib/splitclient-rb/sse/event_source/back_off.rb

This file was deleted.

7 changes: 1 addition & 6 deletions lib/splitclient-rb/sse/workers/segments_worker.rb
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
module SplitIoClient
module SSE
module Workers
MAX_RETRIES_ALLOWED = 10
class SegmentsWorker
def initialize(synchronizer, config, segments_repository)
@synchronizer = synchronizer
Expand Down Expand Up @@ -52,11 +51,7 @@ def perform
cn = item[:change_number]
@config.logger.debug("SegmentsWorker change_number dequeue #{segment_name}, #{cn}")

attempt = 0
while cn > @segments_repository.get_change_number(segment_name).to_i && attempt <= MAX_RETRIES_ALLOWED
@synchronizer.fetch_segment(segment_name)
attempt += 1
end
@synchronizer.fetch_segment(segment_name, cn)
end
end

Expand Down
76 changes: 71 additions & 5 deletions spec/engine/synchronizer_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -96,8 +96,7 @@
expect(a_request(:get, 'https://sdk.split.io/api/splitChanges?since=-1')).to have_been_made.once
end

it 'fetch_splits - ' do
sync = subject.new(repositories, api_key, config, sdk_blocker, parameters)
it 'fetch_splits - with CDN bypassed' do
stub_request(:get, 'https://sdk.split.io/api/splitChanges?since=-1')
.to_return(status: 200, body:
'{
Expand All @@ -122,18 +121,85 @@
"till": 1506703262921
}')

sync.fetch_splits(1_506_703_262_920)
synchronizer.fetch_splits(1_506_703_262_920)

expect(a_request(:get, 'https://sdk.split.io/api/splitChanges?since=-1')).to have_been_made.once
expect(a_request(:get, 'https://sdk.split.io/api/splitChanges?since=1506703262918')).to have_been_made.times(9)
expect(a_request(:get, 'https://sdk.split.io/api/splitChanges?since=1506703262918&till=1506703262920')).to have_been_made.once
end

it 'fetch_segment' do
mock_segment_changes('segment3', segment3, '-1')
stub_request(:get, 'https://sdk.split.io/api/segmentChanges/segment3?since=-1')
.to_return(status: 200, body:
'{
"name": "segment3",
"added": [],
"removed": [],
"since": -1,
"till": 111333
}')

stub_request(:get, 'https://sdk.split.io/api/segmentChanges/segment3?since=111333')
.to_return(status: 200, body:
'{
"name": "segment3",
"added": [],
"removed": [],
"since": 111333,
"till": 111333
}')

synchronizer.fetch_segment('segment3', 111_222)
expect(a_request(:get, 'https://sdk.split.io/api/segmentChanges/segment3?since=-1')).to have_been_made.once
expect(a_request(:get, 'https://sdk.split.io/api/segmentChanges/segment3?since=111333')).to have_been_made.once
end

it 'fetch_segment - with CDN bypassed' do
stub_request(:get, 'https://sdk.split.io/api/segmentChanges/segment3?since=-1')
.to_return(status: 200, body:
'{
"name": "segment3",
"added": [],
"removed": [],
"since": -1,
"till": 111333
}')

stub_request(:get, 'https://sdk.split.io/api/segmentChanges/segment3?since=111333')
.to_return(status: 200, body:
'{
"name": "segment3",
"added": [],
"removed": [],
"since": 111333,
"till": 111333
}')

stub_request(:get, 'https://sdk.split.io/api/segmentChanges/segment3?since=111333&till=111555')
.to_return(status: 200, body:
'{
"name": "segment3",
"added": [],
"removed": [],
"since": 111555,
"till": 111555
}')

stub_request(:get, 'https://sdk.split.io/api/segmentChanges/segment3?since=111555&till=111555')
.to_return(status: 200, body:
'{
"name": "segment3",
"added": [],
"removed": [],
"since": 111555,
"till": 111555
}')

synchronizer.fetch_segment('segment3')
synchronizer.fetch_segment('segment3', 111_555)
expect(a_request(:get, 'https://sdk.split.io/api/segmentChanges/segment3?since=-1')).to have_been_made.once
expect(a_request(:get, 'https://sdk.split.io/api/segmentChanges/segment3?since=111333')).to have_been_made.times(10)
expect(a_request(:get, 'https://sdk.split.io/api/segmentChanges/segment3?since=111333&till=111555')).to have_been_made.once
expect(a_request(:get, 'https://sdk.split.io/api/segmentChanges/segment3?since=111555&till=111555')).to have_been_made.once
end
end

Expand Down
4 changes: 2 additions & 2 deletions spec/sse/event_source/back_off_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@
require 'spec_helper'
require 'http_server_mock'

describe SplitIoClient::SSE::EventSource::BackOff do
subject { SplitIoClient::SSE::EventSource::BackOff }
describe SplitIoClient::Engine::BackOff do
subject { SplitIoClient::Engine::BackOff }

let(:log) { StringIO.new }

Expand Down
2 changes: 1 addition & 1 deletion spec/sse/sse_handler_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -212,7 +212,7 @@

expect(action_event).to eq(SplitIoClient::Constants::PUSH_CONNECTED)
expect(sse_handler.sse_client.connected?).to eq(true)
expect(a_request(:get, 'https://sdk.split.io/api/segmentChanges/segment1?since=1470947453877')).to have_been_made.times(12)
expect(a_request(:get, 'https://sdk.split.io/api/segmentChanges/segment1?since=1470947453877')).to have_been_made.times(11)

sse_handler.sse_client.close

Expand Down
2 changes: 1 addition & 1 deletion spec/sse/workers/segments_worker_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@

sleep(1)

expect(a_request(:get, 'https://sdk.split.io/api/segmentChanges/segment1?since=1470947453877')).to have_been_made.times(12)
expect(a_request(:get, 'https://sdk.split.io/api/segmentChanges/segment1?since=1470947453877')).to have_been_made.times(11)
end

it 'must not trigger fetch' do
Expand Down