provider/archive: Converting to datasource. (#8492)

* Converting archive_file to datasource.

* Ratcheting back new dir perms.

* Ratcheting back new dir perms.

* goimports

* Adding output_base64sha256 attribute to archive_file.

Updating docs.

* Dropping CheckDestroy since this is a data source.

* Correcting data source attribute checks.
Brad Sickles 2016-10-25 10:59:06 -04:00 committed by Paul Stack
parent f4a4962b21
commit 65523fa006
5 changed files with 68 additions and 106 deletions
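For context: after this change the archive is declared with a `data` block rather than a `resource` block, and the new `output_base64sha256` attribute is exported alongside `output_sha` and `output_size`. A minimal sketch of the converted usage (hypothetical names, Terraform 0.7-era interpolation syntax):

```
data "archive_file" "example" {
  type                    = "zip"
  source_content          = "This is some content"
  source_content_filename = "content.txt"
  output_path             = "example.zip"
}

output "archive_base64sha256" {
  value = "${data.archive_file.example.output_base64sha256}"
}
```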

View File

@@ -2,6 +2,8 @@ package archive
import (
"crypto/sha1"
"crypto/sha256"
"encoding/base64"
"encoding/hex"
"fmt"
"io/ioutil"
@@ -11,13 +13,9 @@ import (
"github.com/hashicorp/terraform/helper/schema"
)
func resourceArchiveFile() *schema.Resource {
func dataSourceFile() *schema.Resource {
return &schema.Resource{
Create: resourceArchiveFileCreate,
Read: resourceArchiveFileRead,
Update: resourceArchiveFileUpdate,
Delete: resourceArchiveFileDelete,
Exists: resourceArchiveFileExists,
Read: dataSourceFileRead,
Schema: map[string]*schema.Schema{
"type": &schema.Schema{
@@ -64,50 +62,56 @@ func resourceArchiveFile() *schema.Resource {
ForceNew: true,
Description: "SHA1 checksum of output file",
},
"output_base64sha256": &schema.Schema{
Type: schema.TypeString,
Computed: true,
ForceNew: true,
Description: "Base64 Encoded SHA256 checksum of output file",
},
},
}
}
func resourceArchiveFileCreate(d *schema.ResourceData, meta interface{}) error {
if err := resourceArchiveFileUpdate(d, meta); err != nil {
func dataSourceFileRead(d *schema.ResourceData, meta interface{}) error {
outputPath := d.Get("output_path").(string)
outputDirectory := path.Dir(outputPath)
if outputDirectory != "" {
if _, err := os.Stat(outputDirectory); err != nil {
if err := os.MkdirAll(outputDirectory, 0755); err != nil {
return err
}
}
}
if err := archive(d); err != nil {
return err
}
return resourceArchiveFileRead(d, meta)
}
func resourceArchiveFileRead(d *schema.ResourceData, meta interface{}) error {
outputPath := d.Get("output_path").(string)
// Generate archived file stats
fi, err := os.Stat(outputPath)
if os.IsNotExist(err) {
d.SetId("")
d.MarkNewResource()
return nil
if err != nil {
return err
}
sha, err := genFileSha1(outputPath)
sha1, base64sha256, err := genFileShas(outputPath)
if err != nil {
return fmt.Errorf("could not generate file checksum sha: %s", err)
return fmt.Errorf("could not generate file checksum sha256: %s", err)
}
d.Set("output_sha", sha)
d.Set("output_sha", sha1)
d.Set("output_base64sha256", base64sha256)
d.Set("output_size", fi.Size())
d.SetId(d.Get("output_sha").(string))
return nil
}
func resourceArchiveFileUpdate(d *schema.ResourceData, meta interface{}) error {
func archive(d *schema.ResourceData) error {
archiveType := d.Get("type").(string)
outputPath := d.Get("output_path").(string)
outputDirectory := path.Dir(outputPath)
if outputDirectory != "" {
if _, err := os.Stat(outputDirectory); err != nil {
if err := os.MkdirAll(outputDirectory, 0777); err != nil {
return err
}
}
}
archiver := getArchiver(archiveType, outputPath)
if archiver == nil {
return fmt.Errorf("archive type not supported: %s", archiveType)
@@ -129,55 +133,22 @@ func resourceArchiveFileUpdate(d *schema.ResourceData, meta interface{}) error {
} else {
return fmt.Errorf("one of 'source_dir', 'source_file', 'source_content_filename' must be specified")
}
// Generate archived file stats
fi, err := os.Stat(outputPath)
if err != nil {
return err
}
sha, err := genFileSha1(outputPath)
if err != nil {
return fmt.Errorf("could not generate file checksum sha: %s", err)
}
d.Set("output_sha", sha)
d.Set("output_size", fi.Size())
d.SetId(d.Get("output_sha").(string))
return nil
}
func resourceArchiveFileDelete(d *schema.ResourceData, meta interface{}) error {
outputPath := d.Get("output_path").(string)
if _, err := os.Stat(outputPath); os.IsNotExist(err) {
return nil
}
if err := os.Remove(outputPath); err != nil {
return fmt.Errorf("could not delete zip file %q: %s", outputPath, err)
}
return nil
}
func resourceArchiveFileExists(d *schema.ResourceData, meta interface{}) (bool, error) {
outputPath := d.Get("output_path").(string)
_, err := os.Stat(outputPath)
if os.IsNotExist(err) {
return false, nil
}
if err != nil {
return false, err
}
return true, nil
}
func genFileSha1(filename string) (string, error) {
func genFileShas(filename string) (string, string, error) {
data, err := ioutil.ReadFile(filename)
if err != nil {
return "", fmt.Errorf("could not compute file '%s' checksum: %s", filename, err)
return "", "", fmt.Errorf("could not compute file '%s' checksum: %s", filename, err)
}
h := sha1.New()
h.Write([]byte(data))
return hex.EncodeToString(h.Sum(nil)), nil
sha1 := hex.EncodeToString(h.Sum(nil))
h256 := sha256.New()
h256.Write([]byte(data))
shaSum := h256.Sum(nil)
sha256base64 := base64.StdEncoding.EncodeToString(shaSum[:])
return sha1, sha256base64, nil
}
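One behavioural detail of the new read path above: if the directory portion of `output_path` does not already exist, it is created with `os.MkdirAll` (0755) before the archive is written, so a nested output path needs no extra setup. A small configuration sketch (hypothetical paths) illustrating this:

```
data "archive_file" "nested" {
  type                    = "zip"
  source_content          = "hello"
  source_content_filename = "hello.txt"

  # The "files" directory is created automatically if it is missing.
  output_path = "${path.module}/files/hello.zip"
}
```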

View File

@@ -13,29 +13,26 @@ func TestAccArchiveFile_Basic(t *testing.T) {
var fileSize string
r.Test(t, r.TestCase{
Providers: testProviders,
CheckDestroy: r.ComposeTestCheckFunc(
testAccArchiveFileMissing("zip_file_acc_test.zip"),
),
Steps: []r.TestStep{
r.TestStep{
Config: testAccArchiveFileContentConfig,
Check: r.ComposeTestCheckFunc(
testAccArchiveFileExists("zip_file_acc_test.zip", &fileSize),
r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize),
r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
),
},
r.TestStep{
Config: testAccArchiveFileFileConfig,
Check: r.ComposeTestCheckFunc(
testAccArchiveFileExists("zip_file_acc_test.zip", &fileSize),
r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize),
r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
),
},
r.TestStep{
Config: testAccArchiveFileDirConfig,
Check: r.ComposeTestCheckFunc(
testAccArchiveFileExists("zip_file_acc_test.zip", &fileSize),
r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize),
r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
),
},
r.TestStep{
@@ -60,21 +57,8 @@ func testAccArchiveFileExists(filename string, fileSize *string) r.TestCheckFunc
}
}
func testAccArchiveFileMissing(filename string) r.TestCheckFunc {
return func(s *terraform.State) error {
_, err := os.Stat(filename)
if err != nil {
if os.IsNotExist(err) {
return nil
}
return err
}
return fmt.Errorf("found file expected to be deleted: %s", filename)
}
}
var testAccArchiveFileContentConfig = `
resource "archive_file" "foo" {
data "archive_file" "foo" {
type = "zip"
source_content = "This is some content"
source_content_filename = "content.txt"
@@ -84,7 +68,7 @@ resource "archive_file" "foo" {
var tmpDir = os.TempDir() + "/test"
var testAccArchiveFileOutputPath = fmt.Sprintf(`
resource "archive_file" "foo" {
data "archive_file" "foo" {
type = "zip"
source_content = "This is some content"
source_content_filename = "content.txt"
@@ -93,7 +77,7 @@ resource "archive_file" "foo" {
`, tmpDir)
var testAccArchiveFileFileConfig = `
resource "archive_file" "foo" {
data "archive_file" "foo" {
type = "zip"
source_file = "test-fixtures/test-file.txt"
output_path = "zip_file_acc_test.zip"
@@ -101,7 +85,7 @@ resource "archive_file" "foo" {
`
var testAccArchiveFileDirConfig = `
resource "archive_file" "foo" {
data "archive_file" "foo" {
type = "zip"
source_dir = "test-fixtures/test-dir"
output_path = "zip_file_acc_test.zip"

View File

@@ -7,10 +7,14 @@ import (
func Provider() terraform.ResourceProvider {
return &schema.Provider{
Schema: map[string]*schema.Schema{},
DataSourcesMap: map[string]*schema.Resource{
"archive_file": dataSourceFile(),
},
ResourcesMap: map[string]*schema.Resource{
"archive_file": resourceArchiveFile(),
"archive_file": schema.DataSourceResourceShim(
"archive_file",
dataSourceFile(),
),
},
}
}
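The `DataSourceResourceShim` registration above is what preserves backwards compatibility: `archive_file` becomes a data source, while the old resource form is still accepted through the shim (which, as in other providers, should surface it as deprecated). Assuming that behaviour, a legacy configuration along these lines would still plan and apply:

```
# Deprecated resource form, still accepted via the shim:
resource "archive_file" "legacy" {
  type        = "zip"
  source_file = "test-fixtures/test-file.txt"
  output_path = "legacy.zip"
}
```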

View File

@@ -1,7 +1,7 @@
---
layout: "archive"
page_title: "Archive: archive_file"
sidebar_current: "docs-archive-resource-file"
sidebar_current: "docs-archive-datasource-archive-file"
description: |-
Generates an archive from content, a file, or directory of files.
---
@@ -13,9 +13,9 @@ Generates an archive from content, a file, or directory of files.
## Example Usage
```
resource "archive_file" "init" {
type = "zip"
source_content_filename = "${path.module}/init.tpl"
data "archive_file" "init" {
type = "zip"
source_file = "${path.module}/init.tpl"
output_path = "${path.module}/files/init.zip"
}
```
@@ -44,4 +44,7 @@ NOTE: One of `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified.
The following attributes are exported:
* `output_size` - The size of the output archive file.
* `output_sha` - The SHA1 checksum of output archive file.
* `output_base64sha256` - The base64-encoded SHA256 checksum of output archive file.
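The base64-encoded SHA256 form is the variant typically handed to APIs that expect a base64 digest. An illustrative pairing (not part of this change) with `aws_lambda_function`, whose `source_code_hash` expects exactly that encoding:

```
data "archive_file" "lambda" {
  type        = "zip"
  source_file = "${path.module}/lambda.js"
  output_path = "${path.module}/files/lambda.zip"
}

resource "aws_lambda_function" "example" {
  # ...
  filename         = "${data.archive_file.lambda.output_path}"
  source_code_hash = "${data.archive_file.lambda.output_base64sha256}"
}
```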

View File

@@ -10,11 +10,11 @@
<a href="/docs/providers/archive/index.html">Archive Provider</a>
</li>
<li<%= sidebar_current(/^docs-archive-resource/) %>>
<a href="#">Resources</a>
<li<%= sidebar_current(/^docs-archive-datasource/) %>>
<a href="#">Data Sources</a>
<ul class="nav nav-visible">
<li<%= sidebar_current("docs-archive-resource-file") %>>
<a href="/docs/providers/archive/r/file.html">archive_file</a>
<li<%= sidebar_current("docs-archive-datasource-archive-file") %>>
<a href="/docs/providers/archive/d/archive_file.html">archive_file</a>
</li>
</ul>
</li>