Implement archive provider and "archive_file" resource. (#7322)

Brad Sickles 2016-08-07 20:56:44 -04:00 committed by Paul Stack
parent 725d60ab57
commit 70cadcf31d
19 changed files with 647 additions and 0 deletions
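The provider exposes a single `archive_file` resource that packages inline content, a single file, or a directory into an archive (only zip is supported for now). A minimal configuration in the style of the acceptance tests added below (paths are illustrative, not part of this commit):

```
# Sketch only: directory and output paths are illustrative.
resource "archive_file" "example" {
  type        = "zip"
  source_dir  = "${path.module}/files"
  output_path = "${path.module}/files.zip"
}
```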


@@ -0,0 +1,12 @@
package main
import (
"github.com/hashicorp/terraform/builtin/providers/archive"
"github.com/hashicorp/terraform/plugin"
)
func main() {
plugin.Serve(&plugin.ServeOpts{
ProviderFunc: archive.Provider,
})
}

Binary file not shown.

Binary file not shown.

Binary file not shown.


@@ -0,0 +1,47 @@
package archive
import (
"fmt"
"os"
)
type Archiver interface {
ArchiveContent(content []byte, infilename string) error
ArchiveFile(infilename string) error
ArchiveDir(indirname string) error
}
type ArchiverBuilder func(filepath string) Archiver
var archiverBuilders = map[string]ArchiverBuilder{
"zip": NewZipArchiver,
}
func getArchiver(archiveType string, filepath string) Archiver {
if builder, ok := archiverBuilders[archiveType]; ok {
return builder(filepath)
}
return nil
}
func assertValidFile(infilename string) (os.FileInfo, error) {
fi, err := os.Stat(infilename)
if err != nil && os.IsNotExist(err) {
return fi, fmt.Errorf("could not archive missing file: %s", infilename)
}
return fi, err
}
func assertValidDir(indirname string) (os.FileInfo, error) {
fi, err := os.Stat(indirname)
if err != nil {
if os.IsNotExist(err) {
return fi, fmt.Errorf("could not archive missing directory: %s", indirname)
}
return fi, err
}
if !fi.IsDir() {
return fi, fmt.Errorf("could not archive directory that is a file: %s", indirname)
}
return fi, nil
}


@@ -0,0 +1,16 @@
package archive
import (
"github.com/hashicorp/terraform/helper/schema"
"github.com/hashicorp/terraform/terraform"
)
func Provider() terraform.ResourceProvider {
return &schema.Provider{
Schema: map[string]*schema.Schema{},
ResourcesMap: map[string]*schema.Resource{
"archive_file": resourceArchiveFile(),
},
}
}


@@ -0,0 +1,18 @@
package archive
import (
"testing"
"github.com/hashicorp/terraform/helper/schema"
"github.com/hashicorp/terraform/terraform"
)
var testProviders = map[string]terraform.ResourceProvider{
"archive": Provider(),
}
func TestProvider(t *testing.T) {
if err := Provider().(*schema.Provider).InternalValidate(); err != nil {
t.Fatalf("err: %s", err)
}
}


@@ -0,0 +1,174 @@
package archive
import (
"crypto/sha1"
"encoding/hex"
"fmt"
"github.com/hashicorp/terraform/helper/schema"
"io/ioutil"
"os"
)
func resourceArchiveFile() *schema.Resource {
return &schema.Resource{
Create: resourceArchiveFileCreate,
Read: resourceArchiveFileRead,
Update: resourceArchiveFileUpdate,
Delete: resourceArchiveFileDelete,
Exists: resourceArchiveFileExists,
Schema: map[string]*schema.Schema{
"type": &schema.Schema{
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"source_content": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
ConflictsWith: []string{"source_file", "source_dir"},
},
"source_content_filename": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
ConflictsWith: []string{"source_file", "source_dir"},
},
"source_file": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
ConflictsWith: []string{"source_content", "source_content_filename", "source_dir"},
},
"source_dir": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
ConflictsWith: []string{"source_content", "source_content_filename", "source_file"},
},
"output_path": &schema.Schema{
Type: schema.TypeString,
Required: true,
},
"output_size": &schema.Schema{
Type: schema.TypeInt,
Computed: true,
ForceNew: true,
},
"output_sha": &schema.Schema{
Type: schema.TypeString,
Computed: true,
ForceNew: true,
Description: "SHA1 checksum of output file",
},
},
}
}
func resourceArchiveFileCreate(d *schema.ResourceData, meta interface{}) error {
if err := resourceArchiveFileUpdate(d, meta); err != nil {
return err
}
return resourceArchiveFileRead(d, meta)
}
func resourceArchiveFileRead(d *schema.ResourceData, meta interface{}) error {
output_path := d.Get("output_path").(string)
fi, err := os.Stat(output_path)
if os.IsNotExist(err) {
d.SetId("")
d.MarkNewResource()
return nil
}
if err != nil {
return err
}
sha, err := genFileSha1(output_path)
if err != nil {
return fmt.Errorf("could not generate file checksum sha: %s", err)
}
d.Set("output_sha", sha)
d.Set("output_size", fi.Size())
d.SetId(d.Get("output_sha").(string))
return nil
}
func resourceArchiveFileUpdate(d *schema.ResourceData, meta interface{}) error {
archiveType := d.Get("type").(string)
outputPath := d.Get("output_path").(string)
archiver := getArchiver(archiveType, outputPath)
if archiver == nil {
return fmt.Errorf("archive type not supported: %s", archiveType)
}
if dir, ok := d.GetOk("source_dir"); ok {
if err := archiver.ArchiveDir(dir.(string)); err != nil {
return fmt.Errorf("error archiving directory: %s", err)
}
} else if file, ok := d.GetOk("source_file"); ok {
if err := archiver.ArchiveFile(file.(string)); err != nil {
return fmt.Errorf("error archiving file: %s", err)
}
} else if filename, ok := d.GetOk("source_content_filename"); ok {
content := d.Get("source_content").(string)
if err := archiver.ArchiveContent([]byte(content), filename.(string)); err != nil {
return fmt.Errorf("error archiving content: %s", err)
}
} else {
return fmt.Errorf("one of 'source_dir', 'source_file', 'source_content_filename' must be specified")
}
// Generate archived file stats
fi, err := os.Stat(outputPath)
if err != nil {
return err
}
sha, err := genFileSha1(outputPath)
if err != nil {
return fmt.Errorf("could not generate file checksum sha: %s", err)
}
d.Set("output_sha", sha)
d.Set("output_size", fi.Size())
d.SetId(d.Get("output_sha").(string))
return nil
}
func resourceArchiveFileDelete(d *schema.ResourceData, meta interface{}) error {
output_path := d.Get("output_path").(string)
_, err := os.Stat(output_path)
if os.IsNotExist(err) {
return nil
}
if err != nil {
return err
}
if err := os.Remove(output_path); err != nil {
return fmt.Errorf("could not delete zip file '%s': %s", output_path, err)
}
}
return nil
}
func resourceArchiveFileExists(d *schema.ResourceData, meta interface{}) (bool, error) {
output_path := d.Get("output_path").(string)
_, err := os.Stat(output_path)
if os.IsNotExist(err) {
return false, nil
}
if err != nil {
return false, err
}
return true, nil
}
func genFileSha1(filename string) (string, error) {
data, err := ioutil.ReadFile(filename)
if err != nil {
return "", fmt.Errorf("could not compute file '%s' checksum: %s", filename, err)
}
h := sha1.New()
h.Write(data)
return hex.EncodeToString(h.Sum(nil)), nil
}


@@ -0,0 +1,92 @@
package archive
import (
"fmt"
r "github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
"os"
"testing"
)
func TestAccArchiveFile_Basic(t *testing.T) {
var fileSize string
r.Test(t, r.TestCase{
Providers: testProviders,
CheckDestroy: r.ComposeTestCheckFunc(
testAccArchiveFileMissing("zip_file_acc_test.zip"),
),
Steps: []r.TestStep{
r.TestStep{
Config: testAccArchiveFileContentConfig,
Check: r.ComposeTestCheckFunc(
testAccArchiveFileExists("zip_file_acc_test.zip", &fileSize),
r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize),
),
},
r.TestStep{
Config: testAccArchiveFileFileConfig,
Check: r.ComposeTestCheckFunc(
testAccArchiveFileExists("zip_file_acc_test.zip", &fileSize),
r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize),
),
},
r.TestStep{
Config: testAccArchiveFileDirConfig,
Check: r.ComposeTestCheckFunc(
testAccArchiveFileExists("zip_file_acc_test.zip", &fileSize),
r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize),
),
},
},
})
}
func testAccArchiveFileExists(filename string, fileSize *string) r.TestCheckFunc {
return func(s *terraform.State) error {
*fileSize = ""
fi, err := os.Stat(filename)
if err != nil {
return err
}
*fileSize = fmt.Sprintf("%d", fi.Size())
return nil
}
}
func testAccArchiveFileMissing(filename string) r.TestCheckFunc {
return func(s *terraform.State) error {
_, err := os.Stat(filename)
if err != nil {
if os.IsNotExist(err) {
return nil
}
return err
}
return fmt.Errorf("found file expected to be deleted: %s", filename)
}
}
var testAccArchiveFileContentConfig = `
resource "archive_file" "foo" {
type = "zip"
source_content = "This is some content"
source_content_filename = "content.txt"
output_path = "zip_file_acc_test.zip"
}
`
var testAccArchiveFileFileConfig = `
resource "archive_file" "foo" {
type = "zip"
source_file = "test-fixtures/test-file.txt"
output_path = "zip_file_acc_test.zip"
}
`
var testAccArchiveFileDirConfig = `
resource "archive_file" "foo" {
type = "zip"
source_dir = "test-fixtures/test-dir"
output_path = "zip_file_acc_test.zip"
}
`


@@ -0,0 +1 @@
This is file 1


@@ -0,0 +1 @@
This is file 2


@@ -0,0 +1 @@
This is file 3


@@ -0,0 +1 @@
This is test content


@@ -0,0 +1,107 @@
package archive
import (
"archive/zip"
"fmt"
"io/ioutil"
"os"
"path/filepath"
)
type ZipArchiver struct {
filepath string
filewriter *os.File
writer *zip.Writer
}
func NewZipArchiver(filepath string) Archiver {
return &ZipArchiver{
filepath: filepath,
}
}
func (a *ZipArchiver) ArchiveContent(content []byte, infilename string) error {
if err := a.open(); err != nil {
return err
}
defer a.close()
f, err := a.writer.Create(infilename)
if err != nil {
return err
}
_, err = f.Write(content)
return err
}
func (a *ZipArchiver) ArchiveFile(infilename string) error {
fi, err := assertValidFile(infilename)
if err != nil {
return err
}
content, err := ioutil.ReadFile(infilename)
if err != nil {
return err
}
return a.ArchiveContent(content, fi.Name())
}
func (a *ZipArchiver) ArchiveDir(indirname string) error {
_, err := assertValidDir(indirname)
if err != nil {
return err
}
if err := a.open(); err != nil {
return err
}
defer a.close()
return filepath.Walk(indirname, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
relname, err := filepath.Rel(indirname, path)
if err != nil {
return fmt.Errorf("error relativizing file for archival: %s", err)
}
f, err := a.writer.Create(relname)
if err != nil {
return fmt.Errorf("error creating file inside archive: %s", err)
}
content, err := ioutil.ReadFile(path)
if err != nil {
return fmt.Errorf("error reading file for archival: %s", err)
}
_, err = f.Write(content)
return err
})
}
func (a *ZipArchiver) open() error {
f, err := os.Create(a.filepath)
if err != nil {
return err
}
a.filewriter = f
a.writer = zip.NewWriter(f)
return nil
}
func (a *ZipArchiver) close() {
if a.writer != nil {
a.writer.Close()
a.writer = nil
}
if a.filewriter != nil {
a.filewriter.Close()
a.filewriter = nil
}
}


@@ -0,0 +1,84 @@
package archive
import (
"archive/zip"
"io/ioutil"
"testing"
)
func TestZipArchiver_Content(t *testing.T) {
zipfilepath := "archive-content.zip"
archiver := NewZipArchiver(zipfilepath)
if err := archiver.ArchiveContent([]byte("This is some content"), "content.txt"); err != nil {
t.Fatalf("unexpected error: %s", err)
}
ensureContents(t, zipfilepath, map[string][]byte{
"content.txt": []byte("This is some content"),
})
}
func TestZipArchiver_File(t *testing.T) {
zipfilepath := "archive-file.zip"
archiver := NewZipArchiver(zipfilepath)
if err := archiver.ArchiveFile("./test-fixtures/test-file.txt"); err != nil {
t.Fatalf("unexpected error: %s", err)
}
ensureContents(t, zipfilepath, map[string][]byte{
"test-file.txt": []byte("This is test content"),
})
}
func TestZipArchiver_Dir(t *testing.T) {
zipfilepath := "archive-dir.zip"
archiver := NewZipArchiver(zipfilepath)
if err := archiver.ArchiveDir("./test-fixtures/test-dir"); err != nil {
t.Fatalf("unexpected error: %s", err)
}
ensureContents(t, zipfilepath, map[string][]byte{
"file1.txt": []byte("This is file 1"),
"file2.txt": []byte("This is file 2"),
"file3.txt": []byte("This is file 3"),
})
}
func ensureContents(t *testing.T, zipfilepath string, wants map[string][]byte) {
r, err := zip.OpenReader(zipfilepath)
if err != nil {
t.Fatalf("could not open zip file: %s", err)
}
defer r.Close()
if len(r.File) != len(wants) {
t.Errorf("mismatched file count, got %d, want %d", len(r.File), len(wants))
}
for _, cf := range r.File {
ensureContent(t, wants, cf)
}
}
func ensureContent(t *testing.T, wants map[string][]byte, got *zip.File) {
want, ok := wants[got.Name]
if !ok {
t.Errorf("additional file in zip: %s", got.Name)
return
}
r, err := got.Open()
if err != nil {
t.Errorf("could not open file: %s", err)
return
}
defer r.Close()
gotContentBytes, err := ioutil.ReadAll(r)
if err != nil {
t.Errorf("could not read file: %s", err)
}
wantContent := string(want)
gotContent := string(gotContentBytes)
if gotContent != wantContent {
t.Errorf("mismatched content\ngot\n%s\nwant\n%s", gotContent, wantContent)
}
}


@@ -6,6 +6,7 @@
package command
import (
archiveprovider "github.com/hashicorp/terraform/builtin/providers/archive"
atlasprovider "github.com/hashicorp/terraform/builtin/providers/atlas"
awsprovider "github.com/hashicorp/terraform/builtin/providers/aws"
azureprovider "github.com/hashicorp/terraform/builtin/providers/azure"
@@ -104,6 +105,7 @@ var InternalProviders = map[string]plugin.ProviderFunc{
"ultradns": ultradnsprovider.Provider,
"vcd": vcdprovider.Provider,
"vsphere": vsphereprovider.Provider,
"archive": archiveprovider.Provider,
}
var InternalProvisioners = map[string]plugin.ProvisionerFunc{


@@ -0,0 +1,20 @@
---
layout: "archive"
page_title: "Provider: Archive"
sidebar_current: "docs-archive-index"
description: |-
The Archive provider is used to manage archive files.
---
# Archive Provider
The archive provider exposes resources to manage archive files.
Use the navigation to the left to read about the available resources.
## Example Usage
```
provider "archive" {
}
```


@@ -0,0 +1,45 @@
---
layout: "archive"
page_title: "Archive: archive_file"
sidebar_current: "docs-archive-resource-file"
description: |-
Generates an archive from content, a file, or directory of files.
---
# archive\_file
Generates an archive from content, a file, or directory of files.
## Example Usage
```
resource "archive_file" "init" {
template = "${file("${path.module}/init.tpl")}"
}
```
## Argument Reference
The following arguments are supported:
NOTE: One of `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified; the remaining source modes are illustrated after this list.
* `type` - (required) The type of archive to generate. NOTE: currently only `zip` is supported.
* `output_path` - (required) The output path of the archive file.
* `source_content` - (optional) Add only this content to the archive with `source_content_filename` as the filename.
* `source_content_filename` - (optional) Set this as the filename when using `source_content`.
* `source_file` - (optional) Package this file into the archive.
* `source_dir` - (optional) Package entire contents of this directory into the archive.
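The other source modes follow the same pattern; the sketches below are adapted from the acceptance tests in this commit, with illustrative paths and names:

```
# Sketches adapted from the acceptance tests; paths and names are illustrative.

# Archive a literal string as a single file inside the archive.
resource "archive_file" "from_content" {
  type                    = "zip"
  source_content          = "This is some content"
  source_content_filename = "content.txt"
  output_path             = "${path.module}/content.zip"
}

# Archive the entire contents of a directory.
resource "archive_file" "from_dir" {
  type        = "zip"
  source_dir  = "${path.module}/files"
  output_path = "${path.module}/files.zip"
}
```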
## Attributes Reference
The following attributes are exported:
* `output_size` - The size of the output archive file, in bytes.
* `output_sha` - The SHA1 checksum of the output archive file.
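Both attributes are computed once the archive has been written and can be interpolated like any other attribute; for example (output names are illustrative, and `init` refers to the resource in the example above):

```
# Output names are illustrative.
output "init_archive_sha" {
  value = "${archive_file.init.output_sha}"
}

output "init_archive_size" {
  value = "${archive_file.init.output_size}"
}
```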


@@ -0,0 +1,26 @@
<% wrap_layout :inner do %>
<% content_for :sidebar do %>
<div class="docs-sidebar hidden-print affix-top" role="complementary">
<ul class="nav docs-sidenav">
<li<%= sidebar_current("docs-home") %>>
<a href="/docs/providers/index.html">&laquo; Documentation Home</a>
</li>
<li<%= sidebar_current("docs-archive-index") %>>
<a href="/docs/providers/archive/index.html">Template Provider</a>
</li>
<li<%= sidebar_current(/^docs-archive-resource/) %>>
<a href="#">Resources</a>
<ul class="nav nav-visible">
<li<%= sidebar_current("docs-archive-resource-file") %>>
<a href="/docs/providers/archive/r/file.html">archive_file</a>
</li>
</ul>
</li>
</ul>
</div>
<% end %>
<%= yield %>
<% end %>