
Increase timeouts #259

Merged (10 commits, Mar 10, 2016)
agent/agent_pool.go (3 changes: 2 additions & 1 deletion)
@@ -154,7 +154,8 @@ func (r *AgentPool) RegisterAgent(agent *api.Agent) (*api.Agent, error) {
return err
}

- err = retry.Do(register, &retry.Config{Maximum: 30, Interval: 1 * time.Second})
+ // Try to register, retrying every 10 seconds for a maximum of 30 attempts (5 minutes)
+ err = retry.Do(register, &retry.Config{Maximum: 30, Interval: 10 * time.Second})

return registered, err
}
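Nearly every file in this PR touches the same retry helper, only with a longer interval. A minimal sketch of the pattern, assuming the agent's own retry package; the import path and the *retry.Stats callback parameter are inferred from the diff (the `s` passed to logger.Warn in other hunks), not confirmed against the repo:

package main

import (
	"fmt"
	"time"

	"github.com/buildkite/agent/retry" // assumed import path
)

func main() {
	// The callback is retried until it returns nil or attempts run out.
	register := func(s *retry.Stats) error {
		fmt.Printf("registering (%s)\n", s)
		return fmt.Errorf("API not reachable yet") // placeholder failure
	}

	// 30 attempts at a 10-second interval: roughly 5 minutes of retrying,
	// matching the comment added in agent_pool.go.
	err := retry.Do(register, &retry.Config{Maximum: 30, Interval: 10 * time.Second})
	if err != nil {
		fmt.Println("gave up:", err)
	}
}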
agent/agent_worker.go (6 changes: 3 additions & 3 deletions)
@@ -167,7 +167,7 @@ func (a *AgentWorker) Connect() error {
}

return err
- }, &retry.Config{Maximum: 10, Interval: 1 * time.Second})
+ }, &retry.Config{Maximum: 10, Interval: 5 * time.Second})
}

// Performs a heatbeat
@@ -182,7 +182,7 @@ func (a *AgentWorker) Heartbeat() error {
logger.Warn("%s (%s)", err, s)
}
return err
- }, &retry.Config{Maximum: 5, Interval: 1 * time.Second})
+ }, &retry.Config{Maximum: 5, Interval: 5 * time.Second})

if err != nil {
return err
@@ -263,7 +263,7 @@ func (a *AgentWorker) Ping() {
}

return err
- }, &retry.Config{Maximum: 30, Interval: 1 * time.Second})
+ }, &retry.Config{Maximum: 30, Interval: 5 * time.Second})

// If `accepted` is nil, then the job was never accepted
if accepted == nil {
agent/api_client.go (2 changes: 1 addition & 1 deletion)
@@ -39,7 +39,7 @@ func (a APIClient) Create() *api.Client {

// From the transport, create the a http client
httpClient := transport.Client()
- httpClient.Timeout = 10 * time.Second
+ httpClient.Timeout = 60 * time.Second

// Create the Buildkite Agent API Client
client := api.NewClient(httpClient)
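This is the one change that isn't a retry interval: the overall HTTP client timeout goes from 10 to 60 seconds. In the diff, `transport.Client()` is a helper on the agent's authenticated transport; a plain standard-library sketch of the same idea (the construction here is hypothetical, only the Timeout value mirrors the diff):

package main

import (
	"net/http"
	"time"
)

func newAPIHTTPClient() *http.Client {
	return &http.Client{
		Transport: &http.Transport{}, // stand-in for the agent's authenticated transport
		// Timeout bounds the whole exchange: connecting, any redirects,
		// and reading the response body. 60s gives slow API calls room
		// to finish instead of failing client-side at 10s.
		Timeout: 60 * time.Second,
	}
}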
agent/artifact_batch_creator.go (2 changes: 1 addition & 1 deletion)
@@ -60,7 +60,7 @@ func (a *ArtifactBatchCreator) Create() ([]*api.Artifact, error) {
}

return err
- }, &retry.Config{Maximum: 10, Interval: 1 * time.Second})
+ }, &retry.Config{Maximum: 10, Interval: 5 * time.Second})

// Did the batch creation eventually fail?
if err != nil {
agent/artifact_uploader.go (4 changes: 2 additions & 2 deletions)
@@ -219,7 +219,7 @@ func (a *ArtifactUploader) upload(artifacts []*api.Artifact) error {
}

return err
- }, &retry.Config{Maximum: 10, Interval: 1 * time.Second})
+ }, &retry.Config{Maximum: 10, Interval: 5 * time.Second})

if err != nil {
logger.Error("Error uploading artifact states: %s", err)
@@ -261,7 +261,7 @@ func (a *ArtifactUploader) upload(artifacts []*api.Artifact) error {
}

return err
- }, &retry.Config{Maximum: 10, Interval: 1 * time.Second})
+ }, &retry.Config{Maximum: 10, Interval: 5 * time.Second})

var state string

agent/download.go (2 changes: 1 addition & 1 deletion)
@@ -41,7 +41,7 @@ func (d Download) Start() error {
logger.Warn("Error trying to download %s (%s) %s", d.URL, err, s)
}
return err
- }, &retry.Config{Maximum: d.Retries, Interval: 1 * time.Second})
+ }, &retry.Config{Maximum: d.Retries, Interval: 5 * time.Second})
}

func (d Download) try() error {
agent/job_runner.go (6 changes: 3 additions & 3 deletions)
@@ -203,7 +203,7 @@ func (r *JobRunner) startJob(startedAt time.Time) error {
}

return err
- }, &retry.Config{Maximum: 30, Interval: 1 * time.Second})
+ }, &retry.Config{Maximum: 30, Interval: 5 * time.Second})
}

// Finishes the job in the Buildkite Agent API. This call will keep on retrying
@@ -294,7 +294,7 @@ func (r *JobRunner) onUploadHeaderTime(cursor int, total int, times map[string]s
}

return err
- }, &retry.Config{Maximum: 10, Interval: 1 * time.Second})
+ }, &retry.Config{Maximum: 10, Interval: 5 * time.Second})
}

// Call when a chunk is ready for upload. It retry the chunk upload with an
@@ -312,5 +312,5 @@ func (r *JobRunner) onUploadChunk(chunk *LogStreamerChunk) error {
}

return err
- }, &retry.Config{Maximum: 10, Interval: 1 * time.Second})
+ }, &retry.Config{Maximum: 10, Interval: 5 * time.Second})
}
clicommand/meta_data_exists.go (2 changes: 1 addition & 1 deletion)
@@ -84,7 +84,7 @@ var MetaDataExistsCommand = cli.Command{
}

return err
- }, &retry.Config{Maximum: 10, Interval: 1 * time.Second})
+ }, &retry.Config{Maximum: 10, Interval: 5 * time.Second})
if err != nil {
logger.Fatal("Failed to see if meta-data exists: %s", err)
}
clicommand/meta_data_get.go (2 changes: 1 addition & 1 deletion)
@@ -84,7 +84,7 @@ var MetaDataGetCommand = cli.Command{
}

return err
- }, &retry.Config{Maximum: 10, Interval: 1 * time.Second})
+ }, &retry.Config{Maximum: 10, Interval: 5 * time.Second})
if err != nil {
logger.Fatal("Failed to get meta-data: %s", err)
}
clicommand/meta_data_set.go (2 changes: 1 addition & 1 deletion)
@@ -86,7 +86,7 @@ var MetaDataSetCommand = cli.Command{
}

return err
- }, &retry.Config{Maximum: 10, Interval: 1 * time.Second})
+ }, &retry.Config{Maximum: 10, Interval: 5 * time.Second})
if err != nil {
logger.Fatal("Failed to set meta-data: %s", err)
}