retry on s3 backend internal errors

Internal errors from S3 are usually transient, and can be immediately retried.
Make 2 attempts at retrieving the state object before returning an error.
This commit is contained in:
bclodius 2017-10-01 08:22:04 -04:00 committed by James Bardin
parent 71e47e56a0
commit fba02f0bea
1 changed file with 27 additions and 12 deletions

View File

@ -22,7 +22,10 @@ import (
)
// Store the last saved serial in dynamo with this suffix for consistency checks.
const stateIDSuffix = "-md5"
const (
// stateIDSuffix is appended to the state path to form the key under which
// the last saved serial is stored in DynamoDB for consistency checks.
stateIDSuffix = "-md5"
// s3ErrCodeInternalError is the AWS error code S3 returns for internal
// errors, which are usually transient and safe to retry immediately.
s3ErrCodeInternalError = "InternalError"
)
type RemoteClient struct {
s3Client *s3.S3
@ -92,21 +95,33 @@ func (c *RemoteClient) Get() (payload *remote.Payload, err error) {
}
func (c *RemoteClient) get() (*remote.Payload, error) {
output, err := c.s3Client.GetObject(&s3.GetObjectInput{
var output *s3.GetObjectOutput
var err error
// we immediately retry on an internal error, as those are usually transient
maxRetries := 2
for retryCount := 0; ; retryCount++ {
output, err = c.s3Client.GetObject(&s3.GetObjectInput{
Bucket: &c.bucketName,
Key: &c.path,
})
if err != nil {
if awserr := err.(awserr.Error); awserr != nil {
if awserr.Code() == "NoSuchKey" {
if awserr, ok := err.(awserr.Error); ok {
switch awserr.Code() {
case s3.ErrCodeNoSuchKey:
return nil, nil
} else {
case s3ErrCodeInternalError:
if retryCount > maxRetries {
return nil, err
}
} else {
log.Println("[WARN] s3 internal error, retrying...")
continue
}
}
return nil, err
}
break
}
defer output.Body.Close()