From 9a8625e6a69a9c936ba658465130662802c1f506 Mon Sep 17 00:00:00 2001 From: Michael Austin Date: Tue, 26 May 2015 10:44:02 -0400 Subject: [PATCH 1/5] added new provider for creating objects in an s3 bucket --- builtin/providers/aws/provider.go | 1 + .../aws/resource_aws_s3_bucket_object.go | 112 ++++++++++++++++++ .../aws/resource_aws_s3_bucket_object_test.go | 98 +++++++++++++++ 3 files changed, 211 insertions(+) create mode 100644 builtin/providers/aws/resource_aws_s3_bucket_object.go create mode 100644 builtin/providers/aws/resource_aws_s3_bucket_object_test.go diff --git a/builtin/providers/aws/provider.go b/builtin/providers/aws/provider.go index db90549d2..07e16282f 100644 --- a/builtin/providers/aws/provider.go +++ b/builtin/providers/aws/provider.go @@ -120,6 +120,7 @@ func Provider() terraform.ResourceProvider { "aws_route_table_association": resourceAwsRouteTableAssociation(), "aws_route_table": resourceAwsRouteTable(), "aws_s3_bucket": resourceAwsS3Bucket(), + "aws_s3_bucket_object": resourceAwsS3BucketObject(), "aws_security_group": resourceAwsSecurityGroup(), "aws_security_group_rule": resourceAwsSecurityGroupRule(), "aws_sqs_queue": resourceAwsSqsQueue(), diff --git a/builtin/providers/aws/resource_aws_s3_bucket_object.go b/builtin/providers/aws/resource_aws_s3_bucket_object.go new file mode 100644 index 000000000..74eb558c9 --- /dev/null +++ b/builtin/providers/aws/resource_aws_s3_bucket_object.go @@ -0,0 +1,112 @@ +package aws + +import ( + "fmt" + "log" + "os" + + "github.com/hashicorp/terraform/helper/schema" + + "github.com/awslabs/aws-sdk-go/aws" + "github.com/awslabs/aws-sdk-go/aws/awsutil" + "github.com/awslabs/aws-sdk-go/service/s3" +) + +func resourceAwsS3BucketObject() *schema.Resource { + return &schema.Resource{ + Create: resourceAwsS3BucketObjectPut, + Read: resourceAwsS3BucketObjectRead, + Update: resourceAwsS3BucketObjectPut, + Delete: resourceAwsS3BucketObjectDelete, + + Schema: map[string]*schema.Schema{ + "bucket": &schema.Schema{ + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "key": &schema.Schema{ + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "source": &schema.Schema{ + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + }, + } +} + +func resourceAwsS3BucketObjectPut(d *schema.ResourceData, meta interface{}) error { + s3conn := meta.(*AWSClient).s3conn + + bucket := d.Get("bucket").(string) + key := d.Get("key").(string) + source := d.Get("source").(string) + + file, err := os.Open(source) + + if err != nil { + d.SetId("") + return fmt.Errorf("Error opening S3 bucket object source(%s): %s", source, err) + } + + resp, err := s3conn.PutObject( + &s3.PutObjectInput{ + Bucket: aws.String(bucket), + Key: aws.String(key), + Body: file, + }) + + if err != nil { + d.SetId("") + return fmt.Errorf("Error putting object in S3 bucket (%s): %s", bucket, err) + } + + d.SetId(*resp.ETag) + return nil +} + +func resourceAwsS3BucketObjectRead(d *schema.ResourceData, meta interface{}) error { + s3conn := meta.(*AWSClient).s3conn + + bucket := d.Get("bucket").(string) + key := d.Get("key").(string) + + resp, err := s3conn.HeadObject( + &s3.HeadObjectInput{ + Bucket: aws.String(bucket), + Key: aws.String(key), + IfMatch: aws.String(d.Id()), + }) + + if err != nil { + // if there is an error reading the object we assume it's not there. 
+ d.SetId("") + log.Printf("Error Reading Object (%s): %s", key, err) + } + + log.Printf(awsutil.StringValue(resp)) + return nil +} + +func resourceAwsS3BucketObjectDelete(d *schema.ResourceData, meta interface{}) error { + s3conn := meta.(*AWSClient).s3conn + + bucket := d.Get("bucket").(string) + key := d.Get("key").(string) + + _, err := s3conn.DeleteObject( + &s3.DeleteObjectInput{ + Bucket: aws.String(bucket), + Key: aws.String(key), + }) + if err != nil { + return fmt.Errorf("Error deleting S3 bucket object: %s", err) + } + return nil +} diff --git a/builtin/providers/aws/resource_aws_s3_bucket_object_test.go b/builtin/providers/aws/resource_aws_s3_bucket_object_test.go new file mode 100644 index 000000000..a06232df7 --- /dev/null +++ b/builtin/providers/aws/resource_aws_s3_bucket_object_test.go @@ -0,0 +1,98 @@ +package aws + +import ( + "fmt" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "io/ioutil" + "os" + "testing" + + "github.com/awslabs/aws-sdk-go/aws" + "github.com/awslabs/aws-sdk-go/service/s3" +) + +var tf, err = ioutil.TempFile("", "tf") + +func TestAccAWSS3BucketObject_basic(t *testing.T) { + // first write some data to the tempfile just so it's not 0 bytes. + ioutil.WriteFile(tf.Name(), []byte("{anything will do }"), 0644) + resource.Test(t, resource.TestCase{ + PreCheck: func() { + if err != nil { + panic(err) + } + testAccPreCheck(t) + }, + Providers: testAccProviders, + CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccAWSS3BucketObjectConfig, + Check: testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object"), + }, + }, + }) +} + +func testAccCheckAWSS3BucketObjectDestroy(s *terraform.State) error { + s3conn := testAccProvider.Meta().(*AWSClient).s3conn + + for _, rs := range s.RootModule().Resources { + if rs.Type != "aws_s3_bucket_object" { + continue + } + + _, err := s3conn.HeadObject( + &s3.HeadObjectInput{ + Bucket: aws.String(rs.Primary.Attributes["bucket"]), + Key: aws.String(rs.Primary.Attributes["key"]), + IfMatch: aws.String(rs.Primary.ID), + }) + if err == nil { + return fmt.Errorf("AWS S3 Object still exists: %s", rs.Primary.ID) + } + } + return nil +} + +func testAccCheckAWSS3BucketObjectExists(n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + + defer os.Remove(tf.Name()) + + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not Found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No S3 Bucket Object ID is set") + } + + s3conn := testAccProvider.Meta().(*AWSClient).s3conn + _, err := s3conn.GetObject( + &s3.GetObjectInput{ + Bucket: aws.String(rs.Primary.Attributes["bucket"]), + Key: aws.String(rs.Primary.Attributes["key"]), + IfMatch: aws.String(rs.Primary.ID), + }) + if err != nil { + return fmt.Errorf("S3Bucket Object error: %s", err) + } + return nil + } +} + +var randomBucket = randInt +var testAccAWSS3BucketObjectConfig = fmt.Sprintf(` +resource "aws_s3_bucket" "object_bucket" { + bucket = "tf-object-test-bucket-%d" +} +resource "aws_s3_bucket_object" "object" { + depends_on = "aws_s3_bucket.object_bucket" + bucket = "tf-object-test-bucket-%d" + key = "test-key" + source = "%s" +} +`, randomBucket, randomBucket, tf.Name()) From 3eceddc34dd158056eba7c7b316cfdb7b276a41a Mon Sep 17 00:00:00 2001 From: Michael Austin Date: Tue, 26 May 2015 11:25:03 -0400 Subject: [PATCH 2/5] added documentation --- .../aws/r/s3_bucket_object.html.markdown | 36 
+++++++++++++++++++
 1 file changed, 36 insertions(+)
 create mode 100644 website/source/docs/providers/aws/r/s3_bucket_object.html.markdown

diff --git a/website/source/docs/providers/aws/r/s3_bucket_object.html.markdown b/website/source/docs/providers/aws/r/s3_bucket_object.html.markdown
new file mode 100644
index 000000000..3f55a1d45
--- /dev/null
+++ b/website/source/docs/providers/aws/r/s3_bucket_object.html.markdown
@@ -0,0 +1,36 @@
+---
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_object"
+sidebar_current: "docs-aws-resource-s3-bucket-object"
+description: |-
+  Provides an S3 bucket object resource.
+---
+
+# aws\_s3\_bucket\_object
+
+Provides an S3 bucket object resource.
+
+## Example Usage
+
+### Uploading a file to a bucket
+
+```
+resource "aws_s3_bucket_object" "object" {
+    bucket = "your_bucket_name"
+    key = "new_object_key"
+    source = "path/to/file"
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+* `bucket` - (Required) The name of the bucket to put the file in.
+* `key` - (Required) The name of the object once it is in the bucket.
+* `source` - (Required) The path to the source file being uploaded to the bucket.
+
+## Attributes Reference
+
+The following attributes are exported:
+
+* `id` - the id of the resource corresponds to the ETag of the bucket object on AWS.

From f6bad310ee1076b8ac53bc391dd57300f6b19d11 Mon Sep 17 00:00:00 2001
From: Clint Shryock
Date: Thu, 30 Jul 2015 15:17:16 -0500
Subject: [PATCH 3/5] update docs

---
 .../docs/providers/aws/r/s3_bucket_object.html.markdown | 5 ++++-
 website/source/layouts/aws.erb                          | 4 ++++
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/website/source/docs/providers/aws/r/s3_bucket_object.html.markdown b/website/source/docs/providers/aws/r/s3_bucket_object.html.markdown
index 3f55a1d45..63d201b82 100644
--- a/website/source/docs/providers/aws/r/s3_bucket_object.html.markdown
+++ b/website/source/docs/providers/aws/r/s3_bucket_object.html.markdown
@@ -25,6 +25,7 @@ resource "aws_s3_bucket_object" "object" {
 ## Argument Reference
 
 The following arguments are supported:
+
 * `bucket` - (Required) The name of the bucket to put the file in.
 * `key` - (Required) The name of the object once it is in the bucket.
 * `source` - (Required) The path to the source file being uploaded to the bucket.
@@ -33,4 +34,6 @@ The following arguments are supported:
 
 The following attributes are exported:
 
-* `id` - the id of the resource corresponds to the ETag of the bucket object on AWS.
+* `id` - the `key` of the resource supplied above
+* `etag` - the ETag generated for the object. 
This is often the MD5 hash of the +object, unless you specify your own encryption keys diff --git a/website/source/layouts/aws.erb b/website/source/layouts/aws.erb index a212bb697..2bbff22f4 100644 --- a/website/source/layouts/aws.erb +++ b/website/source/layouts/aws.erb @@ -279,6 +279,10 @@ aws_s3_bucket + > + aws_s3_bucket_object + + From 44f470514c0a9aca7b60fb87ef12c775518d0986 Mon Sep 17 00:00:00 2001 From: Clint Shryock Date: Thu, 30 Jul 2015 15:17:37 -0500 Subject: [PATCH 4/5] provider/aws: Add S3 Bucket Object (supercedes #2079) --- .../aws/resource_aws_s3_bucket_object.go | 27 +++++++++++-------- .../aws/resource_aws_s3_bucket_object_test.go | 19 ++++++------- 2 files changed, 26 insertions(+), 20 deletions(-) diff --git a/builtin/providers/aws/resource_aws_s3_bucket_object.go b/builtin/providers/aws/resource_aws_s3_bucket_object.go index 74eb558c9..10b260388 100644 --- a/builtin/providers/aws/resource_aws_s3_bucket_object.go +++ b/builtin/providers/aws/resource_aws_s3_bucket_object.go @@ -7,9 +7,8 @@ import ( "github.com/hashicorp/terraform/helper/schema" - "github.com/awslabs/aws-sdk-go/aws" - "github.com/awslabs/aws-sdk-go/aws/awsutil" - "github.com/awslabs/aws-sdk-go/service/s3" + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/service/s3" ) func resourceAwsS3BucketObject() *schema.Resource { @@ -37,6 +36,11 @@ func resourceAwsS3BucketObject() *schema.Resource { Required: true, ForceNew: true, }, + + "etag": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + }, }, } } @@ -51,8 +55,7 @@ func resourceAwsS3BucketObjectPut(d *schema.ResourceData, meta interface{}) erro file, err := os.Open(source) if err != nil { - d.SetId("") - return fmt.Errorf("Error opening S3 bucket object source(%s): %s", source, err) + return fmt.Errorf("Error opening S3 bucket object source (%s): %s", source, err) } resp, err := s3conn.PutObject( @@ -63,11 +66,11 @@ func resourceAwsS3BucketObjectPut(d *schema.ResourceData, meta interface{}) erro }) if err != nil { - d.SetId("") return fmt.Errorf("Error putting object in S3 bucket (%s): %s", bucket, err) } - d.SetId(*resp.ETag) + d.Set("etag", resp.ETag) + d.SetId(key) return nil } @@ -76,21 +79,23 @@ func resourceAwsS3BucketObjectRead(d *schema.ResourceData, meta interface{}) err bucket := d.Get("bucket").(string) key := d.Get("key").(string) + etag := d.Get("etag").(string) resp, err := s3conn.HeadObject( &s3.HeadObjectInput{ - Bucket: aws.String(bucket), - Key: aws.String(key), - IfMatch: aws.String(d.Id()), + Bucket: aws.String(bucket), + Key: aws.String(key), + IfMatch: aws.String(etag), }) if err != nil { // if there is an error reading the object we assume it's not there. 
d.SetId("") log.Printf("Error Reading Object (%s): %s", key, err) + return nil } - log.Printf(awsutil.StringValue(resp)) + log.Printf("[DEBUG] Reading S3 Bucket Object meta: %s", resp) return nil } diff --git a/builtin/providers/aws/resource_aws_s3_bucket_object_test.go b/builtin/providers/aws/resource_aws_s3_bucket_object_test.go index a06232df7..4f947736a 100644 --- a/builtin/providers/aws/resource_aws_s3_bucket_object_test.go +++ b/builtin/providers/aws/resource_aws_s3_bucket_object_test.go @@ -2,14 +2,15 @@ package aws import ( "fmt" - "github.com/hashicorp/terraform/helper/resource" - "github.com/hashicorp/terraform/terraform" "io/ioutil" "os" "testing" - "github.com/awslabs/aws-sdk-go/aws" - "github.com/awslabs/aws-sdk-go/service/s3" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/service/s3" ) var tf, err = ioutil.TempFile("", "tf") @@ -47,7 +48,7 @@ func testAccCheckAWSS3BucketObjectDestroy(s *terraform.State) error { &s3.HeadObjectInput{ Bucket: aws.String(rs.Primary.Attributes["bucket"]), Key: aws.String(rs.Primary.Attributes["key"]), - IfMatch: aws.String(rs.Primary.ID), + IfMatch: aws.String(rs.Primary.Attributes["etag"]), }) if err == nil { return fmt.Errorf("AWS S3 Object still exists: %s", rs.Primary.ID) @@ -75,7 +76,7 @@ func testAccCheckAWSS3BucketObjectExists(n string) resource.TestCheckFunc { &s3.GetObjectInput{ Bucket: aws.String(rs.Primary.Attributes["bucket"]), Key: aws.String(rs.Primary.Attributes["key"]), - IfMatch: aws.String(rs.Primary.ID), + IfMatch: aws.String(rs.Primary.Attributes["etag"]), }) if err != nil { return fmt.Errorf("S3Bucket Object error: %s", err) @@ -89,10 +90,10 @@ var testAccAWSS3BucketObjectConfig = fmt.Sprintf(` resource "aws_s3_bucket" "object_bucket" { bucket = "tf-object-test-bucket-%d" } + resource "aws_s3_bucket_object" "object" { - depends_on = "aws_s3_bucket.object_bucket" - bucket = "tf-object-test-bucket-%d" + bucket = "${aws_s3_bucket.object_bucket.bucket}" key = "test-key" source = "%s" } -`, randomBucket, randomBucket, tf.Name()) +`, randomBucket, tf.Name()) From bfaea76b8617b779651b042b6e4bbfd770adc4a1 Mon Sep 17 00:00:00 2001 From: Clint Shryock Date: Wed, 5 Aug 2015 14:22:29 -0500 Subject: [PATCH 5/5] more tightly scope s3 bucket object error --- .../providers/aws/resource_aws_s3_bucket_object.go | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/builtin/providers/aws/resource_aws_s3_bucket_object.go b/builtin/providers/aws/resource_aws_s3_bucket_object.go index 10b260388..9d46952d0 100644 --- a/builtin/providers/aws/resource_aws_s3_bucket_object.go +++ b/builtin/providers/aws/resource_aws_s3_bucket_object.go @@ -8,6 +8,7 @@ import ( "github.com/hashicorp/terraform/helper/schema" "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/awserr" "github.com/aws/aws-sdk-go/service/s3" ) @@ -89,10 +90,13 @@ func resourceAwsS3BucketObjectRead(d *schema.ResourceData, meta interface{}) err }) if err != nil { - // if there is an error reading the object we assume it's not there. 
- d.SetId("") - log.Printf("Error Reading Object (%s): %s", key, err) - return nil + // If S3 returns a 404 Request Failure, mark the object as destroyed + if awsErr, ok := err.(awserr.RequestFailure); ok && awsErr.StatusCode() == 404 { + d.SetId("") + log.Printf("[WARN] Error Reading Object (%s), object not found (HTTP status 404)", key) + return nil + } + return err } log.Printf("[DEBUG] Reading S3 Bucket Object meta: %s", resp)
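
A note on the error handling introduced in [PATCH 5/5]: only an HTTP 404 from `HeadObject` clears the resource ID, while any other failure is now returned to Terraform instead of being swallowed. The standalone sketch below illustrates that pattern outside the provider. It is illustrative only: the helper name `objectExists`, the placeholder bucket and key, and the `session`-based client construction in `main` are assumptions for the example (the provider itself wires up `s3conn` through `AWSClient`), while the `HeadObject` call and the `awserr.RequestFailure` status check mirror what the patch does.

```
package main

import (
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/awserr"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
)

// objectExists issues a HeadObject request and reports whether the object is
// still present. An HTTP 404 is treated as "gone" rather than as a hard error,
// mirroring the behaviour the final patch gives resourceAwsS3BucketObjectRead.
func objectExists(conn *s3.S3, bucket, key string) (bool, error) {
	_, err := conn.HeadObject(&s3.HeadObjectInput{
		Bucket: aws.String(bucket),
		Key:    aws.String(key),
	})
	if err != nil {
		// Only a 404 means the object is missing; anything else (credentials,
		// network, access denied) is surfaced to the caller unchanged.
		if reqErr, ok := err.(awserr.RequestFailure); ok && reqErr.StatusCode() == 404 {
			return false, nil
		}
		return false, fmt.Errorf("error reading S3 object %s/%s: %s", bucket, key, err)
	}
	return true, nil
}

func main() {
	// Placeholder bucket and key, for illustration only.
	sess := session.Must(session.NewSession())
	exists, err := objectExists(s3.New(sess), "tf-object-test-bucket", "test-key")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("object exists:", exists)
}
```

Asserting on the typed `awserr.RequestFailure` and its status code, rather than matching on error strings, keeps "object deleted out of band" distinct from credential, network, or permission failures, which should still surface as errors rather than silently removing the object from state.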