provider/aws: New resource codepipeline (#11814)

* provider/aws: New resource codepipeline

* Vendor aws/codepipeline

* Add tests

* Add docs

* Bump codepipeline to v1.6.25

* Adjustments based on feedback

* Force new resource on ID change

* Improve tests

* Switch update to read

Since we don't require a second pass, only do a read.

* Skip tests if GITHUB_TOKEN is not set
This commit is contained in:
Erik Jansson 2017-02-22 18:31:24 +01:00 committed by Paul Stack
parent dc7f267758
commit 62aa2c583a
11 changed files with 8497 additions and 1 deletions

View File

@ -27,6 +27,7 @@ import (
"github.com/aws/aws-sdk-go/service/codebuild"
"github.com/aws/aws-sdk-go/service/codecommit"
"github.com/aws/aws-sdk-go/service/codedeploy"
"github.com/aws/aws-sdk-go/service/codepipeline"
"github.com/aws/aws-sdk-go/service/configservice"
"github.com/aws/aws-sdk-go/service/databasemigrationservice"
"github.com/aws/aws-sdk-go/service/directoryservice"
@ -152,6 +153,7 @@ type AWSClient struct {
codebuildconn *codebuild.CodeBuild
codedeployconn *codedeploy.CodeDeploy
codecommitconn *codecommit.CodeCommit
codepipelineconn *codepipeline.CodePipeline
sfnconn *sfn.SFN
ssmconn *ssm.SSM
wafconn *waf.WAF
@ -285,6 +287,7 @@ func (c *Config) Client() (interface{}, error) {
client.codedeployconn = codedeploy.New(sess)
client.configconn = configservice.New(sess)
client.dmsconn = databasemigrationservice.New(sess)
client.codepipelineconn = codepipeline.New(sess)
client.dsconn = directoryservice.New(sess)
client.dynamodbconn = dynamodb.New(dynamoSess)
client.ec2conn = ec2.New(awsEc2Sess)

View File

@ -0,0 +1,34 @@
package aws
import (
"os"
"testing"
"github.com/hashicorp/terraform/helper/acctest"
"github.com/hashicorp/terraform/helper/resource"
)
// TestAccAWSCodePipeline_Import_basic creates a pipeline, then imports it and
// verifies the imported state matches what Terraform created.
func TestAccAWSCodePipeline_Import_basic(t *testing.T) {
	// The GitHub source action in the fixture requires a real OAuth token at
	// create time (injected from GITHUB_TOKEN by the resource).
	if os.Getenv("GITHUB_TOKEN") == "" {
		t.Skip("Environment variable GITHUB_TOKEN is not set")
	}

	name := acctest.RandString(10)

	resource.Test(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckAWSCodePipelineDestroy,
		Steps: []resource.TestStep{
			// gofmt -s style: the element type is implied by the slice type,
			// matching the other CodePipeline acceptance tests.
			{
				Config: testAccAWSCodePipelineConfig_basic(name),
			},
			{
				ResourceName:      "aws_codepipeline.bar",
				ImportState:       true,
				ImportStateVerify: true,
			},
		},
	})
}

View File

@ -251,6 +251,7 @@ func Provider() terraform.ResourceProvider {
"aws_codecommit_repository": resourceAwsCodeCommitRepository(),
"aws_codecommit_trigger": resourceAwsCodeCommitTrigger(),
"aws_codebuild_project": resourceAwsCodeBuildProject(),
"aws_codepipeline": resourceAwsCodePipeline(),
"aws_customer_gateway": resourceAwsCustomerGateway(),
"aws_db_event_subscription": resourceAwsDbEventSubscription(),
"aws_db_instance": resourceAwsDbInstance(),

View File

@ -0,0 +1,493 @@
package aws
import (
	"fmt"
	"os"
	"time"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/awserr"
	"github.com/aws/aws-sdk-go/service/codepipeline"
	"github.com/hashicorp/terraform/helper/resource"
	"github.com/hashicorp/terraform/helper/schema"
)
// resourceAwsCodePipeline defines the aws_codepipeline resource: its schema
// and the CRUD entry points. Pipelines are identified by name, which is
// immutable in the CodePipeline API (hence ForceNew on "name").
func resourceAwsCodePipeline() *schema.Resource {
	return &schema.Resource{
		Create: resourceAwsCodePipelineCreate,
		Read:   resourceAwsCodePipelineRead,
		Update: resourceAwsCodePipelineUpdate,
		Delete: resourceAwsCodePipelineDelete,
		Importer: &schema.ResourceImporter{
			State: schema.ImportStatePassthrough,
		},

		Schema: map[string]*schema.Schema{
			"name": {
				Type:     schema.TypeString,
				Required: true,
				ForceNew: true,
			},
			"role_arn": {
				Type:     schema.TypeString,
				Required: true,
			},
			"artifact_store": {
				Type:     schema.TypeList,
				Required: true,
				MaxItems: 1,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"location": {
							Type:     schema.TypeString,
							Required: true,
						},
						// Only S3 artifact stores are supported by the API.
						"type": {
							Type:         schema.TypeString,
							Required:     true,
							ValidateFunc: validateAwsCodePipelineArtifactStoreType,
						},
						"encryption_key": {
							Type:     schema.TypeList,
							MaxItems: 1,
							Optional: true,
							Elem: &schema.Resource{
								Schema: map[string]*schema.Schema{
									"id": {
										Type:     schema.TypeString,
										Required: true,
									},
									// Only KMS keys are supported by the API.
									"type": {
										Type:         schema.TypeString,
										Required:     true,
										ValidateFunc: validateAwsCodePipelineEncryptionKeyType,
									},
								},
							},
						},
					},
				},
			},
			// A pipeline needs at least a source stage and one other stage.
			"stage": {
				Type:     schema.TypeList,
				MinItems: 2,
				Required: true,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"name": {
							Type:     schema.TypeString,
							Required: true,
						},
						"action": {
							Type:     schema.TypeList,
							Required: true,
							Elem: &schema.Resource{
								Schema: map[string]*schema.Schema{
									// Reject OAuthToken in configuration: the
									// GitHub token must come from the
									// GITHUB_TOKEN environment variable so the
									// secret never lands in config or state.
									// (Previously this validator existed but
									// was never wired in.)
									"configuration": {
										Type:         schema.TypeMap,
										Optional:     true,
										ValidateFunc: validateAwsCodePipelineStageActionConfiguration,
									},
									"category": {
										Type:         schema.TypeString,
										Required:     true,
										ValidateFunc: validateAwsCodePipelineStageActionCategory,
									},
									"owner": {
										Type:         schema.TypeString,
										Required:     true,
										ValidateFunc: validateAwsCodePipelineStageActionOwner,
									},
									"provider": {
										Type:     schema.TypeString,
										Required: true,
									},
									"version": {
										Type:     schema.TypeString,
										Required: true,
									},
									"input_artifacts": {
										Type:     schema.TypeList,
										Optional: true,
										Elem:     &schema.Schema{Type: schema.TypeString},
									},
									"output_artifacts": {
										Type:     schema.TypeList,
										Optional: true,
										Elem:     &schema.Schema{Type: schema.TypeString},
									},
									"name": {
										Type:     schema.TypeString,
										Required: true,
									},
									"role_arn": {
										Type:     schema.TypeString,
										Optional: true,
									},
									// Computed: the API assigns a run order
									// when the user does not set one.
									"run_order": {
										Type:     schema.TypeInt,
										Optional: true,
										Computed: true,
									},
								},
							},
						},
					},
				},
			},
		},
	}
}
// validateAwsCodePipelineEncryptionKeyType ensures the artifact store
// encryption key type is "KMS", the only value the API accepts.
func validateAwsCodePipelineEncryptionKeyType(v interface{}, k string) (ws []string, errors []error) {
	if keyType := v.(string); keyType != "KMS" {
		errors = append(errors, fmt.Errorf("CodePipeline: encryption_key type can only be KMS"))
	}
	return
}
// validateAwsCodePipelineArtifactStoreType ensures the artifact store type is
// "S3", the only value the API accepts.
func validateAwsCodePipelineArtifactStoreType(v interface{}, k string) (ws []string, errors []error) {
	if storeType := v.(string); storeType != "S3" {
		errors = append(errors, fmt.Errorf("CodePipeline: artifact_store type can only be S3"))
	}
	return
}
// validateAwsCodePipelineStageActionCategory checks that an action category is
// one of the values CodePipeline recognizes.
func validateAwsCodePipelineStageActionCategory(v interface{}, k string) (ws []string, errors []error) {
	switch v.(string) {
	case "Source", "Build", "Deploy", "Test", "Invoke", "Approval":
		// valid
	default:
		errors = append(errors, fmt.Errorf("CodePipeline: category can only be one of Source | Build | Deploy | Test | Invoke | Approval"))
	}
	return
}
// validateAwsCodePipelineStageActionOwner checks that an action owner is one
// of the values CodePipeline recognizes.
func validateAwsCodePipelineStageActionOwner(v interface{}, k string) (ws []string, errors []error) {
	switch v.(string) {
	case "AWS", "ThirdParty", "Custom":
		// valid
	default:
		errors = append(errors, fmt.Errorf("CodePipeline: owner can only be one of AWS | ThirdParty | Custom"))
	}
	return
}
// validateAwsCodePipelineStageActionConfiguration rejects action
// configurations that embed a GitHub OAuth token. The token must be supplied
// via the GITHUB_TOKEN environment variable so it never ends up in Terraform
// configuration or state.
func validateAwsCodePipelineStageActionConfiguration(v interface{}, k string) (ws []string, errors []error) {
	for key := range v.(map[string]interface{}) {
		if key != "OAuthToken" {
			continue
		}
		errors = append(errors, fmt.Errorf("CodePipeline: OAuthToken should be set as environment variable 'GITHUB_TOKEN'"))
	}
	return
}
// resourceAwsCodePipelineCreate creates the pipeline and seeds state by
// delegating to Read.
func resourceAwsCodePipelineCreate(d *schema.ResourceData, meta interface{}) error {
	conn := meta.(*AWSClient).codepipelineconn
	params := &codepipeline.CreatePipelineInput{
		Pipeline: expandAwsCodePipeline(d),
	}

	var resp *codepipeline.CreatePipelineOutput
	// A freshly created IAM role in role_arn may not yet be usable by
	// CodePipeline (IAM is eventually consistent), which surfaces as
	// InvalidStructureException. Retry only that error; any other failure is
	// permanent and should fail fast instead of looping for two minutes.
	err := resource.Retry(2*time.Minute, func() *resource.RetryError {
		var err error
		resp, err = conn.CreatePipeline(params)
		if err != nil {
			if awsErr, ok := err.(awserr.Error); ok && awsErr.Code() == codepipeline.ErrCodeInvalidStructureException {
				return resource.RetryableError(err)
			}
			return resource.NonRetryableError(err)
		}
		return nil
	})
	if err != nil {
		return fmt.Errorf("[ERROR] Error creating CodePipeline: %s", err)
	}
	if resp.Pipeline == nil {
		return fmt.Errorf("[ERROR] Error creating CodePipeline: invalid response from AWS")
	}

	// The pipeline name is the resource ID (names are unique per account).
	d.SetId(*resp.Pipeline.Name)

	return resourceAwsCodePipelineRead(d, meta)
}
// expandAwsCodePipeline builds the complete PipelineDeclaration for this
// resource from its Terraform configuration.
func expandAwsCodePipeline(d *schema.ResourceData) *codepipeline.PipelineDeclaration {
	return &codepipeline.PipelineDeclaration{
		Name:          aws.String(d.Get("name").(string)),
		RoleArn:       aws.String(d.Get("role_arn").(string)),
		ArtifactStore: expandAwsCodePipelineArtifactStore(d),
		Stages:        expandAwsCodePipelineStages(d),
	}
}
// expandAwsCodePipelineArtifactStore converts the single-element
// "artifact_store" block (MaxItems: 1) into an API ArtifactStore.
func expandAwsCodePipelineArtifactStore(d *schema.ResourceData) *codepipeline.ArtifactStore {
	raw := d.Get("artifact_store").([]interface{})
	attrs := raw[0].(map[string]interface{})

	store := &codepipeline.ArtifactStore{
		Location: aws.String(attrs["location"].(string)),
		Type:     aws.String(attrs["type"].(string)),
	}

	// encryption_key is an optional single-element sub-block.
	if keys := attrs["encryption_key"].([]interface{}); len(keys) > 0 {
		key := keys[0].(map[string]interface{})
		store.EncryptionKey = &codepipeline.EncryptionKey{
			Type: aws.String(key["type"].(string)),
			Id:   aws.String(key["id"].(string)),
		}
	}

	return store
}
// flattenAwsCodePipelineArtifactStore converts an API ArtifactStore back into
// the single-element list shape the "artifact_store" schema expects.
func flattenAwsCodePipelineArtifactStore(artifactStore *codepipeline.ArtifactStore) []interface{} {
	result := map[string]interface{}{
		"type":     *artifactStore.Type,
		"location": *artifactStore.Location,
	}

	if key := artifactStore.EncryptionKey; key != nil {
		result["encryption_key"] = []interface{}{
			map[string]interface{}{
				"id":   *key.Id,
				"type": *key.Type,
			},
		}
	}

	return []interface{}{result}
}
// expandAwsCodePipelineStages converts the ordered "stage" blocks into API
// StageDeclarations, expanding each stage's actions along the way.
func expandAwsCodePipelineStages(d *schema.ResourceData) []*codepipeline.StageDeclaration {
	raw := d.Get("stage").([]interface{})

	stages := make([]*codepipeline.StageDeclaration, 0, len(raw))
	for _, item := range raw {
		attrs := item.(map[string]interface{})
		stages = append(stages, &codepipeline.StageDeclaration{
			Name:    aws.String(attrs["name"].(string)),
			Actions: expandAwsCodePipelineActions(attrs["action"].([]interface{})),
		})
	}
	return stages
}
// flattenAwsCodePipelineStages converts API StageDeclarations back into the
// list-of-maps shape the "stage" schema expects.
func flattenAwsCodePipelineStages(stages []*codepipeline.StageDeclaration) []interface{} {
	result := []interface{}{}
	for _, s := range stages {
		result = append(result, map[string]interface{}{
			"name":   *s.Name,
			"action": flattenAwsCodePipelineStageActions(s.Actions),
		})
	}
	return result
}
// expandAwsCodePipelineActions converts the raw "action" blocks of a stage
// into API ActionDeclarations.
//
// For GitHub providers the OAuth token is injected from the GITHUB_TOKEN
// environment variable rather than taken from Terraform configuration; the
// flatten side strips it again so the secret never lands in state.
func expandAwsCodePipelineActions(s []interface{}) []*codepipeline.ActionDeclaration {
	actions := []*codepipeline.ActionDeclaration{}
	for _, config := range s {
		data := config.(map[string]interface{})

		conf := expandAwsCodePipelineStageActionConfiguration(data["configuration"].(map[string]interface{}))
		// Inject the GitHub OAuth token from the environment, overriding any
		// value in configuration (which the validator rejects anyway).
		if data["provider"].(string) == "GitHub" {
			githubToken := os.Getenv("GITHUB_TOKEN")
			if githubToken != "" {
				conf["OAuthToken"] = aws.String(githubToken)
			}
		}

		action := codepipeline.ActionDeclaration{
			ActionTypeId: &codepipeline.ActionTypeId{
				Category: aws.String(data["category"].(string)),
				Owner:    aws.String(data["owner"].(string)),
				Provider: aws.String(data["provider"].(string)),
				Version:  aws.String(data["version"].(string)),
			},
			Name:          aws.String(data["name"].(string)),
			Configuration: conf,
		}

		// output_artifacts and input_artifacts are optional lists; only set
		// them on the declaration when the user provided values.
		oa := data["output_artifacts"].([]interface{})
		if len(oa) > 0 {
			outputArtifacts := expandAwsCodePipelineActionsOutputArtifacts(oa)
			action.OutputArtifacts = outputArtifacts
		}
		ia := data["input_artifacts"].([]interface{})
		if len(ia) > 0 {
			inputArtifacts := expandAwsCodePipelineActionsInputArtifacts(ia)
			action.InputArtifacts = inputArtifacts
		}
		// run_order is Computed in the schema; 0 means "not set by the user",
		// in which case the API assigns one.
		ro := data["run_order"].(int)
		if ro > 0 {
			action.RunOrder = aws.Int64(int64(ro))
		}
		actions = append(actions, &action)
	}
	return actions
}
// flattenAwsCodePipelineStageActions converts API ActionDeclarations back
// into the list-of-maps shape the "action" schema expects.
func flattenAwsCodePipelineStageActions(actions []*codepipeline.ActionDeclaration) []interface{} {
	actionsList := []interface{}{}
	for _, action := range actions {
		values := map[string]interface{}{
			"category": *action.ActionTypeId.Category,
			"owner":    *action.ActionTypeId.Owner,
			"provider": *action.ActionTypeId.Provider,
			"version":  *action.ActionTypeId.Version,
			"name":     *action.Name,
		}
		if action.Configuration != nil {
			config := flattenAwsCodePipelineStageActionConfiguration(action.Configuration)
			// The GitHub OAuth token comes from the GITHUB_TOKEN environment
			// variable (see expandAwsCodePipelineActions in this file); drop
			// it here so the secret is never written to Terraform state.
			_, ok := config["OAuthToken"]
			actionProvider := *action.ActionTypeId.Provider
			if ok && actionProvider == "GitHub" {
				delete(config, "OAuthToken")
			}
			values["configuration"] = config
		}
		// Optional fields: only emit them when the API returned values.
		if len(action.OutputArtifacts) > 0 {
			values["output_artifacts"] = flattenAwsCodePipelineActionsOutputArtifacts(action.OutputArtifacts)
		}
		if len(action.InputArtifacts) > 0 {
			values["input_artifacts"] = flattenAwsCodePipelineActionsInputArtifacts(action.InputArtifacts)
		}
		if action.RunOrder != nil {
			values["run_order"] = int(*action.RunOrder)
		}
		actionsList = append(actionsList, values)
	}
	return actionsList
}
// expandAwsCodePipelineStageActionConfiguration converts the schema's
// map[string]interface{} configuration into the map[string]*string the
// CodePipeline API uses.
func expandAwsCodePipelineStageActionConfiguration(config map[string]interface{}) map[string]*string {
	result := make(map[string]*string, len(config))
	for key, value := range config {
		str := value.(string)
		result[key] = &str
	}
	return result
}
// flattenAwsCodePipelineStageActionConfiguration converts the API's
// map[string]*string configuration back into the plain map[string]string the
// schema stores.
func flattenAwsCodePipelineStageActionConfiguration(config map[string]*string) map[string]string {
	result := make(map[string]string, len(config))
	for key, value := range config {
		result[key] = *value
	}
	return result
}
// expandAwsCodePipelineActionsOutputArtifacts converts a list of artifact
// names into API OutputArtifact values.
func expandAwsCodePipelineActionsOutputArtifacts(s []interface{}) []*codepipeline.OutputArtifact {
	artifacts := make([]*codepipeline.OutputArtifact, 0, len(s))
	for _, name := range s {
		artifacts = append(artifacts, &codepipeline.OutputArtifact{
			Name: aws.String(name.(string)),
		})
	}
	return artifacts
}
// flattenAwsCodePipelineActionsOutputArtifacts extracts the artifact names
// from API OutputArtifact values.
func flattenAwsCodePipelineActionsOutputArtifacts(artifacts []*codepipeline.OutputArtifact) []string {
	names := make([]string, 0, len(artifacts))
	for _, artifact := range artifacts {
		names = append(names, *artifact.Name)
	}
	return names
}
// expandAwsCodePipelineActionsInputArtifacts converts a list of artifact
// names into API InputArtifact values.
func expandAwsCodePipelineActionsInputArtifacts(s []interface{}) []*codepipeline.InputArtifact {
	artifacts := make([]*codepipeline.InputArtifact, 0, len(s))
	for _, name := range s {
		artifacts = append(artifacts, &codepipeline.InputArtifact{
			Name: aws.String(name.(string)),
		})
	}
	return artifacts
}
// flattenAwsCodePipelineActionsInputArtifacts extracts the artifact names
// from API InputArtifact values.
func flattenAwsCodePipelineActionsInputArtifacts(artifacts []*codepipeline.InputArtifact) []string {
	names := make([]string, 0, len(artifacts))
	for _, artifact := range artifacts {
		names = append(names, *artifact.Name)
	}
	return names
}
// resourceAwsCodePipelineRead refreshes state from the GetPipeline API. If
// the pipeline no longer exists (deleted out of band), the resource is
// removed from state so Terraform plans a re-create instead of erroring.
func resourceAwsCodePipelineRead(d *schema.ResourceData, meta interface{}) error {
	conn := meta.(*AWSClient).codepipelineconn
	resp, err := conn.GetPipeline(&codepipeline.GetPipelineInput{
		Name: aws.String(d.Id()),
	})
	if err != nil {
		if awsErr, ok := err.(awserr.Error); ok && awsErr.Code() == codepipeline.ErrCodePipelineNotFoundException {
			d.SetId("")
			return nil
		}
		// Fixed: "retreiving" typo and %q (quoted) formatting of the error.
		return fmt.Errorf("[ERROR] Error retrieving Pipeline: %s", err)
	}
	pipeline := resp.Pipeline

	if err := d.Set("artifact_store", flattenAwsCodePipelineArtifactStore(pipeline.ArtifactStore)); err != nil {
		return err
	}
	if err := d.Set("stage", flattenAwsCodePipelineStages(pipeline.Stages)); err != nil {
		return err
	}

	d.Set("name", pipeline.Name)
	d.Set("role_arn", pipeline.RoleArn)
	return nil
}
// resourceAwsCodePipelineUpdate pushes the full pipeline declaration to the
// UpdatePipeline API (which replaces the entire structure), then re-reads
// state.
func resourceAwsCodePipelineUpdate(d *schema.ResourceData, meta interface{}) error {
	conn := meta.(*AWSClient).codepipelineconn

	input := &codepipeline.UpdatePipelineInput{
		Pipeline: expandAwsCodePipeline(d),
	}
	if _, err := conn.UpdatePipeline(input); err != nil {
		return fmt.Errorf(
			"[ERROR] Error updating CodePipeline (%s): %s",
			d.Id(), err)
	}

	return resourceAwsCodePipelineRead(d, meta)
}
// resourceAwsCodePipelineDelete deletes the pipeline by name and clears the
// resource ID on success.
func resourceAwsCodePipelineDelete(d *schema.ResourceData, meta interface{}) error {
	conn := meta.(*AWSClient).codepipelineconn

	input := &codepipeline.DeletePipelineInput{
		Name: aws.String(d.Id()),
	}
	if _, err := conn.DeletePipeline(input); err != nil {
		return err
	}

	d.SetId("")
	return nil
}

View File

@ -0,0 +1,316 @@
package aws
import (
"fmt"
"os"
"testing"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/codepipeline"
"github.com/hashicorp/terraform/helper/acctest"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
)
// TestAccAWSCodePipeline_basic creates a pipeline, then applies an updated
// configuration and verifies the changed encryption key is reflected.
func TestAccAWSCodePipeline_basic(t *testing.T) {
	// The GitHub source action in the fixture requires a real OAuth token at
	// create time (injected from GITHUB_TOKEN by the resource).
	if os.Getenv("GITHUB_TOKEN") == "" {
		t.Skip("Environment variable GITHUB_TOKEN is not set")
	}

	rName := acctest.RandString(10)

	resource.Test(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckAWSCodePipelineDestroy,
		Steps: []resource.TestStep{
			{
				Config: testAccAWSCodePipelineConfig_basic(rName),
				Check: resource.ComposeTestCheckFunc(
					testAccCheckAWSCodePipelineExists("aws_codepipeline.bar"),
					resource.TestCheckResourceAttr("aws_codepipeline.bar", "artifact_store.0.type", "S3"),
					resource.TestCheckResourceAttr("aws_codepipeline.bar", "artifact_store.0.encryption_key.0.id", "1234"),
					resource.TestCheckResourceAttr("aws_codepipeline.bar", "artifact_store.0.encryption_key.0.type", "KMS"),
				),
			},
			{
				Config: testAccAWSCodePipelineConfig_basicUpdated(rName),
				Check: resource.ComposeTestCheckFunc(
					testAccCheckAWSCodePipelineExists("aws_codepipeline.bar"),
					resource.TestCheckResourceAttr("aws_codepipeline.bar", "artifact_store.0.type", "S3"),
					resource.TestCheckResourceAttr("aws_codepipeline.bar", "artifact_store.0.encryption_key.0.id", "4567"),
					resource.TestCheckResourceAttr("aws_codepipeline.bar", "artifact_store.0.encryption_key.0.type", "KMS"),
				),
			},
		},
	})
}
// testAccCheckAWSCodePipelineExists returns a check that the named resource
// is in state with an ID and that the pipeline actually exists in AWS.
func testAccCheckAWSCodePipelineExists(n string) resource.TestCheckFunc {
	return func(s *terraform.State) error {
		rs, ok := s.RootModule().Resources[n]
		if !ok {
			return fmt.Errorf("Not found: %s", n)
		}
		if rs.Primary.ID == "" {
			return fmt.Errorf("No CodePipeline ID is set")
		}

		conn := testAccProvider.Meta().(*AWSClient).codepipelineconn
		_, err := conn.GetPipeline(&codepipeline.GetPipelineInput{
			Name: aws.String(rs.Primary.ID),
		})
		// A nil error means the pipeline was found.
		return err
	}
}
// testAccCheckAWSCodePipelineDestroy verifies every aws_codepipeline resource
// in state is gone from AWS after destroy.
//
// Fixed: the original returned after inspecting only the FIRST matching
// resource, and returned a bogus "Default error" when the state contained no
// pipelines at all. Now every resource is checked and a clean pass returns
// nil.
func testAccCheckAWSCodePipelineDestroy(s *terraform.State) error {
	conn := testAccProvider.Meta().(*AWSClient).codepipelineconn

	for _, rs := range s.RootModule().Resources {
		if rs.Type != "aws_codepipeline" {
			continue
		}

		_, err := conn.GetPipeline(&codepipeline.GetPipelineInput{
			Name: aws.String(rs.Primary.ID),
		})
		if err == nil {
			return fmt.Errorf("Expected AWS CodePipeline to be gone, but was still found")
		}
		// An error (pipeline not found) means this one is gone; keep
		// checking the remaining resources.
	}

	return nil
}
// testAccAWSCodePipelineConfig_basic returns an HCL fixture: an S3 bucket for
// the artifact store (KMS key id "1234"), an IAM role/policy for the
// pipeline, and a two-stage pipeline (GitHub source -> CodeBuild build).
// rName is interpolated into the bucket, role, and pipeline names so parallel
// test runs do not collide. The raw string must stay byte-identical — it is
// compared against refreshed state by the test steps.
func testAccAWSCodePipelineConfig_basic(rName string) string {
	return fmt.Sprintf(`
resource "aws_s3_bucket" "foo" {
bucket = "tf-test-pipeline-%s"
acl = "private"
}
resource "aws_iam_role" "codepipeline_role" {
name = "codepipeline-role-%s"
assume_role_policy = <<EOF
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Principal": {
"Service": "codepipeline.amazonaws.com"
},
"Action": "sts:AssumeRole"
}
]
}
EOF
}
resource "aws_iam_role_policy" "codepipeline_policy" {
name = "codepipeline_policy"
role = "${aws_iam_role.codepipeline_role.id}"
policy = <<EOF
{
"Version": "2012-10-17",
"Statement": [
{
"Effect":"Allow",
"Action": [
"s3:GetObject",
"s3:GetObjectVersion",
"s3:GetBucketVersioning"
],
"Resource": [
"${aws_s3_bucket.foo.arn}",
"${aws_s3_bucket.foo.arn}/*"
]
},
{
"Effect": "Allow",
"Action": [
"codebuild:BatchGetBuilds",
"codebuild:StartBuild"
],
"Resource": "*"
}
]
}
EOF
}
resource "aws_codepipeline" "bar" {
name = "test-pipeline-%s"
role_arn = "${aws_iam_role.codepipeline_role.arn}"
artifact_store {
location = "${aws_s3_bucket.foo.bucket}"
type = "S3"
encryption_key {
id = "1234"
type = "KMS"
}
}
stage {
name = "Source"
action {
name = "Source"
category = "Source"
owner = "ThirdParty"
provider = "GitHub"
version = "1"
output_artifacts = ["test"]
configuration {
Owner = "lifesum-terraform"
Repo = "test"
Branch = "master"
}
}
}
stage {
name = "Build"
action {
name = "Build"
category = "Build"
owner = "AWS"
provider = "CodeBuild"
input_artifacts = ["test"]
version = "1"
configuration {
ProjectName = "test"
}
}
}
}
`, rName, rName, rName)
}
// testAccAWSCodePipelineConfig_basicUpdated is the second-step fixture for
// the update test: same shape as testAccAWSCodePipelineConfig_basic but with
// a different encryption key id ("4567"), GitHub repo/branch, artifact names,
// and CodeBuild project, exercising an in-place UpdatePipeline. The raw
// string must stay byte-identical — it is compared against refreshed state.
func testAccAWSCodePipelineConfig_basicUpdated(rName string) string {
	return fmt.Sprintf(`
resource "aws_s3_bucket" "foo" {
bucket = "tf-test-pipeline-%s"
acl = "private"
}
resource "aws_iam_role" "codepipeline_role" {
name = "codepipeline-role-%s"
assume_role_policy = <<EOF
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Principal": {
"Service": "codepipeline.amazonaws.com"
},
"Action": "sts:AssumeRole"
}
]
}
EOF
}
resource "aws_iam_role_policy" "codepipeline_policy" {
name = "codepipeline_policy"
role = "${aws_iam_role.codepipeline_role.id}"
policy = <<EOF
{
"Version": "2012-10-17",
"Statement": [
{
"Effect":"Allow",
"Action": [
"s3:GetObject",
"s3:GetObjectVersion",
"s3:GetBucketVersioning"
],
"Resource": [
"${aws_s3_bucket.foo.arn}",
"${aws_s3_bucket.foo.arn}/*"
]
},
{
"Effect": "Allow",
"Action": [
"codebuild:BatchGetBuilds",
"codebuild:StartBuild"
],
"Resource": "*"
}
]
}
EOF
}
resource "aws_codepipeline" "bar" {
name = "test-pipeline-%s"
role_arn = "${aws_iam_role.codepipeline_role.arn}"
artifact_store {
location = "${aws_s3_bucket.foo.bucket}"
type = "S3"
encryption_key {
id = "4567"
type = "KMS"
}
}
stage {
name = "Source"
action {
name = "Source"
category = "Source"
owner = "ThirdParty"
provider = "GitHub"
version = "1"
output_artifacts = ["bar"]
configuration {
Owner = "foo-terraform"
Repo = "bar"
Branch = "stable"
}
}
}
stage {
name = "Build"
action {
name = "Build"
category = "Build"
owner = "AWS"
provider = "CodeBuild"
input_artifacts = ["bar"]
version = "1"
configuration {
ProjectName = "foo"
}
}
}
}
`, rName, rName, rName)
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,152 @@
// THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT.
package codepipeline
// NOTE(review): vendored, machine-generated aws-sdk-go code (see the
// "DO NOT EDIT" header) — do not hand-edit; update by re-vendoring the SDK.
const (
	// ErrCodeActionNotFoundException for service response error code
	// "ActionNotFoundException".
	//
	// The specified action cannot be found.
	ErrCodeActionNotFoundException = "ActionNotFoundException"
	// ErrCodeActionTypeNotFoundException for service response error code
	// "ActionTypeNotFoundException".
	//
	// The specified action type cannot be found.
	ErrCodeActionTypeNotFoundException = "ActionTypeNotFoundException"
	// ErrCodeApprovalAlreadyCompletedException for service response error code
	// "ApprovalAlreadyCompletedException".
	//
	// The approval action has already been approved or rejected.
	ErrCodeApprovalAlreadyCompletedException = "ApprovalAlreadyCompletedException"
	// ErrCodeInvalidActionDeclarationException for service response error code
	// "InvalidActionDeclarationException".
	//
	// The specified action declaration was specified in an invalid format.
	ErrCodeInvalidActionDeclarationException = "InvalidActionDeclarationException"
	// ErrCodeInvalidApprovalTokenException for service response error code
	// "InvalidApprovalTokenException".
	//
	// The approval request already received a response or has expired.
	ErrCodeInvalidApprovalTokenException = "InvalidApprovalTokenException"
	// ErrCodeInvalidBlockerDeclarationException for service response error code
	// "InvalidBlockerDeclarationException".
	//
	// Reserved for future use.
	ErrCodeInvalidBlockerDeclarationException = "InvalidBlockerDeclarationException"
	// ErrCodeInvalidClientTokenException for service response error code
	// "InvalidClientTokenException".
	//
	// The client token was specified in an invalid format
	ErrCodeInvalidClientTokenException = "InvalidClientTokenException"
	// ErrCodeInvalidJobException for service response error code
	// "InvalidJobException".
	//
	// The specified job was specified in an invalid format or cannot be found.
	ErrCodeInvalidJobException = "InvalidJobException"
	// ErrCodeInvalidJobStateException for service response error code
	// "InvalidJobStateException".
	//
	// The specified job state was specified in an invalid format.
	ErrCodeInvalidJobStateException = "InvalidJobStateException"
	// ErrCodeInvalidNextTokenException for service response error code
	// "InvalidNextTokenException".
	//
	// The next token was specified in an invalid format. Make sure that the next
	// token you provided is the token returned by a previous call.
	ErrCodeInvalidNextTokenException = "InvalidNextTokenException"
	// ErrCodeInvalidNonceException for service response error code
	// "InvalidNonceException".
	//
	// The specified nonce was specified in an invalid format.
	ErrCodeInvalidNonceException = "InvalidNonceException"
	// ErrCodeInvalidStageDeclarationException for service response error code
	// "InvalidStageDeclarationException".
	//
	// The specified stage declaration was specified in an invalid format.
	ErrCodeInvalidStageDeclarationException = "InvalidStageDeclarationException"
	// ErrCodeInvalidStructureException for service response error code
	// "InvalidStructureException".
	//
	// The specified structure was specified in an invalid format.
	ErrCodeInvalidStructureException = "InvalidStructureException"
	// ErrCodeJobNotFoundException for service response error code
	// "JobNotFoundException".
	//
	// The specified job was specified in an invalid format or cannot be found.
	ErrCodeJobNotFoundException = "JobNotFoundException"
	// ErrCodeLimitExceededException for service response error code
	// "LimitExceededException".
	//
	// The number of pipelines associated with the AWS account has exceeded the
	// limit allowed for the account.
	ErrCodeLimitExceededException = "LimitExceededException"
	// ErrCodeNotLatestPipelineExecutionException for service response error code
	// "NotLatestPipelineExecutionException".
	//
	// The stage has failed in a later run of the pipeline and the pipelineExecutionId
	// associated with the request is out of date.
	ErrCodeNotLatestPipelineExecutionException = "NotLatestPipelineExecutionException"
	// ErrCodePipelineExecutionNotFoundException for service response error code
	// "PipelineExecutionNotFoundException".
	//
	// The pipeline execution was specified in an invalid format or cannot be found,
	// or an execution ID does not belong to the specified pipeline.
	ErrCodePipelineExecutionNotFoundException = "PipelineExecutionNotFoundException"
	// ErrCodePipelineNameInUseException for service response error code
	// "PipelineNameInUseException".
	//
	// The specified pipeline name is already in use.
	ErrCodePipelineNameInUseException = "PipelineNameInUseException"
	// ErrCodePipelineNotFoundException for service response error code
	// "PipelineNotFoundException".
	//
	// The specified pipeline was specified in an invalid format or cannot be found.
	ErrCodePipelineNotFoundException = "PipelineNotFoundException"
	// ErrCodePipelineVersionNotFoundException for service response error code
	// "PipelineVersionNotFoundException".
	//
	// The specified pipeline version was specified in an invalid format or cannot
	// be found.
	ErrCodePipelineVersionNotFoundException = "PipelineVersionNotFoundException"
	// ErrCodeStageNotFoundException for service response error code
	// "StageNotFoundException".
	//
	// The specified stage was specified in an invalid format or cannot be found.
	ErrCodeStageNotFoundException = "StageNotFoundException"
	// ErrCodeStageNotRetryableException for service response error code
	// "StageNotRetryableException".
	//
	// The specified stage can't be retried because the pipeline structure or stage
	// state changed after the stage was not completed; the stage contains no failed
	// actions; one or more actions are still in progress; or another retry attempt
	// is already in progress.
	ErrCodeStageNotRetryableException = "StageNotRetryableException"
	// ErrCodeValidationException for service response error code
	// "ValidationException".
	//
	// The validation was specified in an invalid format.
	ErrCodeValidationException = "ValidationException"
)

View File

@ -0,0 +1,201 @@
// THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT.
package codepipeline
import (
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/client"
"github.com/aws/aws-sdk-go/aws/client/metadata"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/aws/signer/v4"
"github.com/aws/aws-sdk-go/private/protocol/jsonrpc"
)
// Overview
//
// This is the AWS CodePipeline API Reference. This guide provides descriptions
// of the actions and data types for AWS CodePipeline. Some functionality for
// your pipeline is only configurable through the API. For additional information,
// see the AWS CodePipeline User Guide (http://docs.aws.amazon.com/codepipeline/latest/userguide/welcome.html).
//
// You can use the AWS CodePipeline API to work with pipelines, stages, actions,
// gates, and transitions, as described below.
//
// Pipelines are models of automated release processes. Each pipeline is uniquely
// named, and consists of actions, gates, and stages.
//
// You can work with pipelines by calling:
//
// * CreatePipeline, which creates a uniquely-named pipeline.
//
// * DeletePipeline, which deletes the specified pipeline.
//
// * GetPipeline, which returns information about a pipeline structure.
//
// * GetPipelineExecution, which returns information about a specific execution
// of a pipeline.
//
// * GetPipelineState, which returns information about the current state
// of the stages and actions of a pipeline.
//
// * ListPipelines, which gets a summary of all of the pipelines associated
// with your account.
//
// * StartPipelineExecution, which runs the most recent revision of an
// artifact through the pipeline.
//
// * UpdatePipeline, which updates a pipeline with edits or changes to the
// structure of the pipeline.
//
// Pipelines include stages, which are logical groupings of gates and actions.
// Each stage contains one or more actions that must complete before the next
// stage begins. A stage will result in success or failure. If a stage fails,
// then the pipeline stops at that stage and will remain stopped until either
// a new version of an artifact appears in the source location, or a user takes
// action to re-run the most recent artifact through the pipeline. You can call
// GetPipelineState, which displays the status of a pipeline, including the
// status of stages in the pipeline, or GetPipeline, which returns the entire
// structure of the pipeline, including the stages of that pipeline. For more
// information about the structure of stages and actions, also refer to the
// AWS CodePipeline Pipeline Structure Reference (http://docs.aws.amazon.com/codepipeline/latest/userguide/pipeline-structure.html).
//
// Pipeline stages include actions, which are categorized into categories such
// as source or build actions performed within a stage of a pipeline. For example,
// you can use a source action to import artifacts into a pipeline from a source
// such as Amazon S3. Like stages, you do not work with actions directly in
// most cases, but you do define and interact with actions when working with
// pipeline operations such as CreatePipeline and GetPipelineState.
//
// Pipelines also include transitions, which allow the transition of artifacts
// from one stage to the next in a pipeline after the actions in one stage complete.
//
// You can work with transitions by calling:
//
// * DisableStageTransition, which prevents artifacts from transitioning
// to the next stage in a pipeline.
//
// * EnableStageTransition, which enables transition of artifacts between
// stages in a pipeline.
//
// Using the API to integrate with AWS CodePipeline
//
// For third-party integrators or developers who want to create their own integrations
// with AWS CodePipeline, the expected sequence varies from the standard API
// user. In order to integrate with AWS CodePipeline, developers will need to
// work with the following items:
//
// Jobs, which are instances of an action. For example, a job for a source action
// might import a revision of an artifact from a source.
//
// You can work with jobs by calling:
//
// * AcknowledgeJob, which confirms whether a job worker has received the
// specified job,
//
// * GetJobDetails, which returns the details of a job,
//
// * PollForJobs, which determines whether there are any jobs to act upon,
//
//
// * PutJobFailureResult, which provides details of a job failure, and
//
// * PutJobSuccessResult, which provides details of a job success.
//
// Third party jobs, which are instances of an action created by a partner action
// and integrated into AWS CodePipeline. Partner actions are created by members
// of the AWS Partner Network.
//
// You can work with third party jobs by calling:
//
// * AcknowledgeThirdPartyJob, which confirms whether a job worker has received
// the specified job,
//
// * GetThirdPartyJobDetails, which requests the details of a job for a partner
// action,
//
// * PollForThirdPartyJobs, which determines whether there are any jobs to
// act upon,
//
// * PutThirdPartyJobFailureResult, which provides details of a job failure,
// and
//
// * PutThirdPartyJobSuccessResult, which provides details of a job success.
// The service client's operations are safe to be used concurrently.
// It is not safe to mutate any of the client's properties though.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/codepipeline-2015-07-09
type CodePipeline struct {
	*client.Client // embedded base SDK client carrying config, handlers, and endpoint info
}
// initClient, if non-nil, is invoked by newClient to run custom
// client initialization logic after the handlers are installed.
var initClient func(*client.Client)

// initRequest, if non-nil, is invoked by newRequest to run custom
// request initialization logic on every newly created request.
var initRequest func(*request.Request)

// Service information constants.
const (
	ServiceName = "codepipeline" // Service endpoint prefix API calls made to.
	EndpointsID = ServiceName    // Service ID for Regions and Endpoints metadata.
)
// New creates a new instance of the CodePipeline client with a session.
// If additional configuration is needed for the client instance use the optional
// aws.Config parameter to add your extra config.
//
// Example:
//     // Create a CodePipeline client from just a session.
//     svc := codepipeline.New(mySession)
//
//     // Create a CodePipeline client with additional configuration
//     svc := codepipeline.New(mySession, aws.NewConfig().WithRegion("us-west-2"))
func New(p client.ConfigProvider, cfgs ...*aws.Config) *CodePipeline {
	// Resolve the merged client configuration for this service's endpoint,
	// then delegate construction to newClient.
	resolved := p.ClientConfig(EndpointsID, cfgs...)
	return newClient(
		*resolved.Config,
		resolved.Handlers,
		resolved.Endpoint,
		resolved.SigningRegion,
		resolved.SigningName,
	)
}
// newClient creates, initializes and returns a new service client instance.
func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegion, signingName string) *CodePipeline {
	// Static metadata describing the CodePipeline service and its wire protocol.
	info := metadata.ClientInfo{
		ServiceName:   ServiceName,
		SigningName:   signingName,
		SigningRegion: signingRegion,
		Endpoint:      endpoint,
		APIVersion:    "2015-07-09",
		JSONVersion:   "1.1",
		TargetPrefix:  "CodePipeline_20150709",
	}

	svc := &CodePipeline{
		Client: client.New(cfg, info, handlers),
	}

	// Install the protocol handlers: SigV4 request signing plus JSON-RPC
	// marshaling and unmarshaling for this service.
	svc.Handlers.Sign.PushBackNamed(v4.SignRequestHandler)
	svc.Handlers.Build.PushBackNamed(jsonrpc.BuildHandler)
	svc.Handlers.Unmarshal.PushBackNamed(jsonrpc.UnmarshalHandler)
	svc.Handlers.UnmarshalMeta.PushBackNamed(jsonrpc.UnmarshalMetaHandler)
	svc.Handlers.UnmarshalError.PushBackNamed(jsonrpc.UnmarshalErrorHandler)

	// Run custom client initialization if present.
	if initClient != nil {
		initClient(svc.Client)
	}

	return svc
}
// newRequest creates a new request for a CodePipeline operation and runs any
// custom request initialization.
func (c *CodePipeline) newRequest(op *request.Operation, params, data interface{}) *request.Request {
	r := c.NewRequest(op, params, data)

	// Apply custom request initialization when configured.
	if initRequest != nil {
		initRequest(r)
	}

	return r
}

10
vendor/vendor.json vendored
View File

@ -804,7 +804,15 @@
"versionExact": "v1.6.25"
},
{
"checksumSHA1": "NYRd4lqocAcZdkEvLHAZYyXz8Bs=",
"checksumSHA1": "LXjLQyMAadcANG0UURWuw4di2YE=",
"path": "github.com/aws/aws-sdk-go/service/codepipeline",
"revision": "b2852089fcfd0794d25d57f193e15121ab8a6d9e",
"revisionTime": "2017-02-17T18:38:06Z",
"version": "v1.6.25",
"versionExact": "v1.6.25"
},
{
"checksumSHA1": "tTOqlqLdJim89F/7bLsXe5WfIyQ=",
"path": "github.com/aws/aws-sdk-go/service/configservice",
"revision": "b2852089fcfd0794d25d57f193e15121ab8a6d9e",
"revisionTime": "2017-02-17T18:38:06Z",

View File

@ -0,0 +1,147 @@
---
layout: "aws"
page_title: "AWS: aws_codepipeline"
sidebar_current: "docs-aws-resource-codepipeline"
description: |-
Provides a CodePipeline
---
# aws\_codepipeline
Provides a CodePipeline.
~> **NOTE on `aws_codepipeline`:** - the `GITHUB_TOKEN` environment variable must be set if the GitHub provider is specified.
## Example Usage
```
resource "aws_s3_bucket" "foo" {
bucket = "test-bucket"
acl = "private"
}
resource "aws_iam_role" "foo" {
name = "test-role"
assume_role_policy = <<EOF
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Principal": {
"Service": "codepipeline.amazonaws.com"
},
"Action": "sts:AssumeRole"
}
]
}
EOF
}
resource "aws_iam_role_policy" "codepipeline_policy" {
name = "codepipeline_policy"
role = "${aws_iam_role.foo.id}"
policy = <<EOF
{
"Version": "2012-10-17",
"Statement": [
{
"Effect":"Allow",
"Action": [
"s3:GetObject",
"s3:GetObjectVersion",
"s3:GetBucketVersioning"
],
"Resource": [
"${aws_s3_bucket.foo.arn}",
"${aws_s3_bucket.foo.arn}/*"
]
},
{
"Effect": "Allow",
"Action": [
"codebuild:BatchGetBuilds",
"codebuild:StartBuild"
],
"Resource": "*"
}
]
}
EOF
}
resource "aws_codepipeline" "foo" {
name = "tf-test-pipeline"
role_arn = "${aws_iam_role.foo.arn}"
artifact_store {
location = "${aws_s3_bucket.foo.bucket}"
type = "S3"
}
stage {
name = "Source"
action {
name = "Source"
category = "Source"
owner = "ThirdParty"
provider = "GitHub"
version = "1"
output_artifacts = ["test"]
configuration {
Owner = "my-organization"
Repo = "test"
Branch = "master"
}
}
}
stage {
name = "Build"
action {
name = "Build"
category = "Build"
owner = "AWS"
provider = "CodeBuild"
input_artifacts = ["test"]
version = "1"
configuration {
ProjectName = "test"
}
}
}
}
```
## Argument Reference
The following arguments are supported:
* `name` - (Required) The name of the pipeline.
* `role_arn` - (Required) A service role Amazon Resource Name (ARN) that grants AWS CodePipeline permission to make calls to AWS services on your behalf.
* `artifact_store` - (Required) An artifact_store block. Artifact stores are documented below.
* `stage` - (Required) A stage block. Stages are documented below.
An `artifact_store` block supports the following arguments:
* `location` - (Required) The location where AWS CodePipeline stores artifacts for a pipeline, such as an S3 bucket.
* `type` - (Required) The type of the artifact store, such as Amazon S3.
* `encryption_key` - (Optional) The encryption key AWS CodePipeline uses to encrypt the data in the artifact store, such as an AWS Key Management Service (AWS KMS) key. If you don't specify a key, AWS CodePipeline uses the default key for Amazon Simple Storage Service (Amazon S3).
A `stage` block supports the following arguments:
* `name` - (Required) The name of the stage.
* `action` - (Required) The action(s) to include in the stage
## Attributes Reference
The following attributes are exported:
* `id` - The codepipeline ID.

View File

@ -311,6 +311,17 @@
</ul>
</li>
<li<%= sidebar_current(/^docs-aws-resource-codepipeline/) %>>
<a href="#">CodePipeline Resources</a>
<ul class="nav nav-visible">
<li<%= sidebar_current("docs-aws-resource-codepipeline") %>>
<a href="/docs/providers/aws/r/codepipeline.html">aws_codepipeline</a>
</li>
</ul>
</li>
<li<%= sidebar_current(/^docs-aws-resource-config/) %>>
<a href="#">Config Resources</a>
<ul class="nav nav-visible">