Merge pull request #3192 from lwander/f-gcs-bucket-object

Allow GCS users to upload objects to buckets from a local file
Dave Cunningham 2015-09-09 12:27:37 -04:00
commit b91df72371
6 changed files with 282 additions and 0 deletions
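
For orientation, a minimal configuration exercising the new resource might look like the sketch below (bucket name and file path are illustrative placeholders, not part of this change):

```
resource "google_storage_bucket" "example" {
  name = "my-example-bucket"
}

resource "google_storage_bucket_object" "greeting" {
  name   = "hello.txt"
  bucket = "${google_storage_bucket.example.name}"
  source = "/tmp/hello.txt"
}
```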


@@ -6,6 +6,7 @@ FEATURES:
* **New resource: `cloudstack_loadbalancer_rule`** [GH-2934]
* **New resource: `google_compute_project_metadata`** [GH-3065]
* **New resources: `aws_ami`, `aws_ami_copy`, `aws_ami_from_instance`** [GH-2874]
* **New resource: `google_storage_bucket_object`** [GH-3192]
IMPROVEMENTS:


@@ -52,6 +52,7 @@ func Provider() terraform.ResourceProvider {
"google_dns_record_set": resourceDnsRecordSet(),
"google_compute_instance_group_manager": resourceComputeInstanceGroupManager(),
"google_storage_bucket": resourceStorageBucket(),
"google_storage_bucket_object": resourceStorageBucketObject(),
},
ConfigureFunc: providerConfigure,


@@ -0,0 +1,132 @@
package google
import (
"os"
"fmt"
"github.com/hashicorp/terraform/helper/schema"
"google.golang.org/api/storage/v1"
)
func resourceStorageBucketObject() *schema.Resource {
return &schema.Resource{
Create: resourceStorageBucketObjectCreate,
Read: resourceStorageBucketObjectRead,
Update: resourceStorageBucketObjectUpdate,
Delete: resourceStorageBucketObjectDelete,
Schema: map[string]*schema.Schema{
"bucket": &schema.Schema{
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"name": &schema.Schema{
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"source": &schema.Schema{
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"predefined_acl": &schema.Schema{
Type: schema.TypeString,
Default: "projectPrivate",
Optional: true,
ForceNew: true,
},
"md5hash": &schema.Schema{
Type: schema.TypeString,
Computed: true,
},
"crc32c": &schema.Schema{
Type: schema.TypeString,
Computed: true,
},
},
}
}
func objectGetId(object *storage.Object) string {
return object.Bucket + "-" + object.Name
}
func resourceStorageBucketObjectCreate(d *schema.ResourceData, meta interface{}) error {
config := meta.(*Config)
bucket := d.Get("bucket").(string)
name := d.Get("name").(string)
source := d.Get("source").(string)
acl := d.Get("predefined_acl").(string)
file, err := os.Open(source)
if err != nil {
return fmt.Errorf("Error opening %s: %s", source, err)
}
defer file.Close()
objectsService := storage.NewObjectsService(config.clientStorage)
object := &storage.Object{Bucket: bucket}
insertCall := objectsService.Insert(bucket, object)
insertCall.Name(name)
insertCall.Media(file)
insertCall.PredefinedAcl(acl)
_, err = insertCall.Do()
if err != nil {
return fmt.Errorf("Error uploading contents of object %s from %s: %s", name, source, err)
}
return resourceStorageBucketObjectRead(d, meta)
}
func resourceStorageBucketObjectRead(d *schema.ResourceData, meta interface{}) error {
config := meta.(*Config)
bucket := d.Get("bucket").(string)
name := d.Get("name").(string)
objectsService := storage.NewObjectsService(config.clientStorage)
getCall := objectsService.Get(bucket, name)
res, err := getCall.Do()
if err != nil {
return fmt.Errorf("Error retrieving contents of object %s: %s", name, err)
}
d.Set("md5hash", res.Md5Hash)
d.Set("crc32c", res.Crc32c)
d.SetId(objectGetId(res))
return nil
}
func resourceStorageBucketObjectUpdate(d *schema.ResourceData, meta interface{}) error {
// The Cloud Storage API doesn't support updating object data contents,
// only metadata, so this is a no-op until metadata attributes are supported.
return nil
}
func resourceStorageBucketObjectDelete(d *schema.ResourceData, meta interface{}) error {
config := meta.(*Config)
bucket := d.Get("bucket").(string)
name := d.Get("name").(string)
objectsService := storage.NewObjectsService(config.clientStorage)
deleteCall := objectsService.Delete(bucket, name)
err := deleteCall.Do()
if err != nil {
return fmt.Errorf("Error deleting contents of object %s: %s", name, err)
}
return nil
}
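
Since every user-settable argument in the schema above is marked `ForceNew` and Update is a stub, changing any of them replaces the object on the next apply rather than updating it in place. A sketch of the practical effect (names and paths are placeholders):

```
resource "google_storage_bucket_object" "asset" {
  name   = "logo.png"
  bucket = "my-assets"            # assumed pre-existing bucket
  # Editing this path (or name, bucket, predefined_acl) forces a new
  # upload of the object instead of an in-place update.
  source = "/local/assets/logo-v2.png"
}
```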


@@ -0,0 +1,102 @@
package google
import (
"fmt"
"testing"
"io/ioutil"
"crypto/md5"
"encoding/base64"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
"google.golang.org/api/storage/v1"
)
var tf, err = ioutil.TempFile("", "tf-gce-test")
var bucketName = "tf-gce-bucket-test"
var objectName = "tf-gce-test"
func TestAccGoogleStorageObject_basic(t *testing.T) {
data := []byte("data data data")
h := md5.New()
h.Write(data)
data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil))
ioutil.WriteFile(tf.Name(), data, 0644)
resource.Test(t, resource.TestCase{
PreCheck: func() {
if err != nil {
panic(err)
}
testAccPreCheck(t)
},
Providers: testAccProviders,
CheckDestroy: testAccGoogleStorageObjectDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: testGoogleStorageBucketsObjectBasic,
Check: testAccCheckGoogleStorageObject(bucketName, objectName, data_md5),
},
},
})
}
func testAccCheckGoogleStorageObject(bucket, object, md5 string) resource.TestCheckFunc {
return func(s *terraform.State) error {
config := testAccProvider.Meta().(*Config)
objectsService := storage.NewObjectsService(config.clientStorage)
getCall := objectsService.Get(bucket, object)
res, err := getCall.Do()
if err != nil {
return fmt.Errorf("Error retrieving contents of object %s: %s", object, err)
}
if md5 != res.Md5Hash {
return fmt.Errorf("Error contents of %s garbled, md5 hashes don't match (%s, %s)", object, md5, res.Md5Hash)
}
return nil
}
}
func testAccGoogleStorageObjectDestroy(s *terraform.State) error {
config := testAccProvider.Meta().(*Config)
for _, rs := range s.RootModule().Resources {
if rs.Type != "google_storage_bucket_object" {
continue
}
bucket := rs.Primary.Attributes["bucket"]
name := rs.Primary.Attributes["name"]
objectsService := storage.NewObjectsService(config.clientStorage)
getCall := objectsService.Get(bucket, name)
_, err := getCall.Do()
if err == nil {
return fmt.Errorf("Object %s still exists", name)
}
}
return nil
}
var testGoogleStorageBucketsObjectBasic = fmt.Sprintf(`
resource "google_storage_bucket" "bucket" {
name = "%s"
}
resource "google_storage_bucket_object" "object" {
name = "%s"
bucket = "${google_storage_bucket.bucket.name}"
source = "%s"
predefined_acl = "projectPrivate"
}
`, bucketName, objectName, tf.Name())


@@ -0,0 +1,42 @@
---
layout: "google"
page_title: "Google: google_storage_bucket_object"
sidebar_current: "docs-google-resource-storage-object"
description: |-
Creates a new object inside a specified bucket
---
# google\_storage\_bucket\_object
Creates a new object inside an existing bucket in Google Cloud Storage (GCS). Currently, it does not support creating custom ACLs. For more information, see [the official documentation](https://cloud.google.com/storage/docs/overview) and the [API](https://cloud.google.com/storage/docs/json_api).
## Example Usage
Example creating a public object in an existing `image-store` bucket.
```
resource "google_storage_bucket_object" "picture" {
name = "butterfly01"
source = "/images/nature/garden-tiger-moth.jpg"
bucket = "image-store"
predefined_acl = "publicRead"
}
```
## Argument Reference
The following arguments are supported:
* `name` - (Required) The name of the object.
* `bucket` - (Required) The name of the containing bucket.
* `source` - (Required) A path to the data you want to upload.
* `predefined_acl` - (Optional, Default: 'projectPrivate') The [canned GCS ACL](https://cloud.google.com/storage/docs/access-control#predefined-acl) to apply.
## Attributes Reference
The following attributes are exported:
* `md5hash` - (Computed) Base 64 MD5 hash of the uploaded data.
* `crc32c` - (Computed) Base 64 CRC32C hash of the uploaded data.
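
As an illustration (the output names are hypothetical), the computed hashes can be referenced elsewhere in a configuration, for example to expose them as outputs for integrity checks:

```
resource "google_storage_bucket_object" "picture" {
  name   = "butterfly01"
  bucket = "image-store"
  source = "/images/nature/garden-tiger-moth.jpg"
}

output "picture_md5" {
  value = "${google_storage_bucket_object.picture.md5hash}"
}

output "picture_crc32c" {
  value = "${google_storage_bucket_object.picture.crc32c}"
}
```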


@@ -84,6 +84,10 @@
<li<%= sidebar_current("docs-google-resource-storage-bucket") %>>
<a href="/docs/providers/google/r/storage_bucket.html">google_storage_bucket</a>
</li>
<li<%= sidebar_current("docs-google-resource-storage-bucket-object") %>>
<a href="/docs/providers/google/r/storage_bucket_object.html">google_storage_bucket_object</a>
</li>
</ul>
</li>
</ul>