Merge pull request #2898 from hashicorp/f-aws-s3-object-pr-2079

provider/aws: Add S3 Bucket Object (supersedes #2079)
Clint 2015-08-05 14:30:07 -05:00
commit 56e2894ae5
5 changed files with 264 additions and 0 deletions
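
In short, the new `aws_s3_bucket_object` resource lets a configuration upload a local file into an existing S3 bucket. A minimal sketch of its use (bucket name and paths are placeholders), mirroring the documentation added below:

```
resource "aws_s3_bucket_object" "object" {
  bucket = "your_bucket_name"
  key = "new_object_key"
  source = "path/to/file"
}
```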

@@ -208,6 +208,7 @@ func Provider() terraform.ResourceProvider {
        "aws_route_table": resourceAwsRouteTable(),
        "aws_route_table_association": resourceAwsRouteTableAssociation(),
        "aws_s3_bucket": resourceAwsS3Bucket(),
        "aws_s3_bucket_object": resourceAwsS3BucketObject(),
        "aws_security_group": resourceAwsSecurityGroup(),
        "aws_security_group_rule": resourceAwsSecurityGroupRule(),
        "aws_spot_instance_request": resourceAwsSpotInstanceRequest(),

@@ -0,0 +1,121 @@
package aws

import (
    "fmt"
    "log"
    "os"

    "github.com/hashicorp/terraform/helper/schema"

    "github.com/aws/aws-sdk-go/aws"
    "github.com/aws/aws-sdk-go/aws/awserr"
    "github.com/aws/aws-sdk-go/service/s3"
)

func resourceAwsS3BucketObject() *schema.Resource {
    return &schema.Resource{
        Create: resourceAwsS3BucketObjectPut,
        Read: resourceAwsS3BucketObjectRead,
        Update: resourceAwsS3BucketObjectPut,
        Delete: resourceAwsS3BucketObjectDelete,

        Schema: map[string]*schema.Schema{
            "bucket": &schema.Schema{
                Type: schema.TypeString,
                Required: true,
                ForceNew: true,
            },

            "key": &schema.Schema{
                Type: schema.TypeString,
                Required: true,
                ForceNew: true,
            },

            "source": &schema.Schema{
                Type: schema.TypeString,
                Required: true,
                ForceNew: true,
            },

            "etag": &schema.Schema{
                Type: schema.TypeString,
                Computed: true,
            },
        },
    }
}
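
// resourceAwsS3BucketObjectPut uploads the local source file to the given
// bucket/key and records the ETag returned by S3.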
func resourceAwsS3BucketObjectPut(d *schema.ResourceData, meta interface{}) error {
    s3conn := meta.(*AWSClient).s3conn

    bucket := d.Get("bucket").(string)
    key := d.Get("key").(string)
    source := d.Get("source").(string)

    file, err := os.Open(source)
    if err != nil {
        return fmt.Errorf("Error opening S3 bucket object source (%s): %s", source, err)
    }
    // Release the source file handle once the upload (or error) completes.
    defer file.Close()

    resp, err := s3conn.PutObject(
        &s3.PutObjectInput{
            Bucket: aws.String(bucket),
            Key: aws.String(key),
            Body: file,
        })
    if err != nil {
        return fmt.Errorf("Error putting object in S3 bucket (%s): %s", bucket, err)
    }

    d.Set("etag", resp.ETag)
    d.SetId(key)
    return nil
}
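
// resourceAwsS3BucketObjectRead checks that the object still exists (and still
// matches the stored ETag) via HeadObject; on a 404 it clears the resource ID
// so Terraform will recreate the object.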
func resourceAwsS3BucketObjectRead(d *schema.ResourceData, meta interface{}) error {
    s3conn := meta.(*AWSClient).s3conn

    bucket := d.Get("bucket").(string)
    key := d.Get("key").(string)
    etag := d.Get("etag").(string)

    resp, err := s3conn.HeadObject(
        &s3.HeadObjectInput{
            Bucket: aws.String(bucket),
            Key: aws.String(key),
            IfMatch: aws.String(etag),
        })
    if err != nil {
        // If S3 returns a 404 Request Failure, mark the object as destroyed
        if awsErr, ok := err.(awserr.RequestFailure); ok && awsErr.StatusCode() == 404 {
            d.SetId("")
            log.Printf("[WARN] Error Reading Object (%s), object not found (HTTP status 404)", key)
            return nil
        }
        return err
    }

    log.Printf("[DEBUG] Reading S3 Bucket Object meta: %s", resp)
    return nil
}
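
// resourceAwsS3BucketObjectDelete removes the object from the bucket.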
func resourceAwsS3BucketObjectDelete(d *schema.ResourceData, meta interface{}) error {
    s3conn := meta.(*AWSClient).s3conn

    bucket := d.Get("bucket").(string)
    key := d.Get("key").(string)

    _, err := s3conn.DeleteObject(
        &s3.DeleteObjectInput{
            Bucket: aws.String(bucket),
            Key: aws.String(key),
        })
    if err != nil {
        return fmt.Errorf("Error deleting S3 bucket object: %s", err)
    }
    return nil
}

@@ -0,0 +1,99 @@
package aws

import (
    "fmt"
    "io/ioutil"
    "os"
    "testing"

    "github.com/hashicorp/terraform/helper/resource"
    "github.com/hashicorp/terraform/terraform"

    "github.com/aws/aws-sdk-go/aws"
    "github.com/aws/aws-sdk-go/service/s3"
)
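
// tf is the temporary local file used as the upload source for the acceptance
// test; any error from creating it is surfaced in the test's PreCheck below.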
var tf, err = ioutil.TempFile("", "tf")

func TestAccAWSS3BucketObject_basic(t *testing.T) {
    // first write some data to the tempfile just so it's not 0 bytes.
    ioutil.WriteFile(tf.Name(), []byte("{anything will do }"), 0644)

    resource.Test(t, resource.TestCase{
        PreCheck: func() {
            if err != nil {
                panic(err)
            }
            testAccPreCheck(t)
        },
        Providers: testAccProviders,
        CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
        Steps: []resource.TestStep{
            resource.TestStep{
                Config: testAccAWSS3BucketObjectConfig,
                Check: testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object"),
            },
        },
    })
}

func testAccCheckAWSS3BucketObjectDestroy(s *terraform.State) error {
    s3conn := testAccProvider.Meta().(*AWSClient).s3conn

    for _, rs := range s.RootModule().Resources {
        if rs.Type != "aws_s3_bucket_object" {
            continue
        }

        _, err := s3conn.HeadObject(
            &s3.HeadObjectInput{
                Bucket: aws.String(rs.Primary.Attributes["bucket"]),
                Key: aws.String(rs.Primary.Attributes["key"]),
                IfMatch: aws.String(rs.Primary.Attributes["etag"]),
            })
        if err == nil {
            return fmt.Errorf("AWS S3 Object still exists: %s", rs.Primary.ID)
        }
    }
    return nil
}

func testAccCheckAWSS3BucketObjectExists(n string) resource.TestCheckFunc {
    return func(s *terraform.State) error {
        defer os.Remove(tf.Name())

        rs, ok := s.RootModule().Resources[n]
        if !ok {
            return fmt.Errorf("Not Found: %s", n)
        }
        if rs.Primary.ID == "" {
            return fmt.Errorf("No S3 Bucket Object ID is set")
        }

        s3conn := testAccProvider.Meta().(*AWSClient).s3conn
        _, err := s3conn.GetObject(
            &s3.GetObjectInput{
                Bucket: aws.String(rs.Primary.Attributes["bucket"]),
                Key: aws.String(rs.Primary.Attributes["key"]),
                IfMatch: aws.String(rs.Primary.Attributes["etag"]),
            })
        if err != nil {
            return fmt.Errorf("S3 Bucket Object error: %s", err)
        }
        return nil
    }
}
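
// randomBucket supplies the numeric suffix for the test bucket name; randInt
// is a package-level random value defined elsewhere in the acceptance tests.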
var randomBucket = randInt

var testAccAWSS3BucketObjectConfig = fmt.Sprintf(`
resource "aws_s3_bucket" "object_bucket" {
    bucket = "tf-object-test-bucket-%d"
}

resource "aws_s3_bucket_object" "object" {
    bucket = "${aws_s3_bucket.object_bucket.bucket}"
    key = "test-key"
    source = "%s"
}
`, randomBucket, tf.Name())

@@ -0,0 +1,39 @@
---
layout: "aws"
page_title: "AWS: aws_s3_bucket_object"
sidebar_current: "docs-aws-resource-s3-bucket-object"
description: |-
  Provides an S3 bucket object resource.
---

# aws\_s3\_bucket\_object

Provides an S3 bucket object resource.

## Example Usage

### Uploading a file to a bucket

```
resource "aws_s3_bucket_object" "object" {
  bucket = "your_bucket_name"
  key = "new_object_key"
  source = "path/to/file"
}
```

## Argument Reference

The following arguments are supported:

* `bucket` - (Required) The name of the bucket to put the file in.
* `key` - (Required) The name of the object once it is in the bucket.
* `source` - (Required) The path to the source file being uploaded to the bucket.

## Attributes Reference

The following attributes are exported:

* `id` - the `key` of the resource supplied above
* `etag` - the ETag generated for the object. This is often the MD5 hash of the
  object, unless you specify your own encryption keys.
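
The exported attributes can be interpolated elsewhere in a configuration. As a
minimal sketch (the output name below is illustrative, not part of the
resource), the object's ETag could be surfaced as an output:

```
resource "aws_s3_bucket_object" "object" {
  bucket = "your_bucket_name"
  key = "new_object_key"
  source = "path/to/file"
}

output "object_etag" {
  value = "${aws_s3_bucket_object.object.etag}"
}
```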

@@ -279,6 +279,10 @@
          <a href="/docs/providers/aws/r/s3_bucket.html">aws_s3_bucket</a>
        </li>
        <li<%= sidebar_current("docs-aws-resource-s3-bucket-object") %>>
          <a href="/docs/providers/aws/r/s3_bucket_object.html">aws_s3_bucket_object</a>
        </li>
      </ul>
    </li>