allow datasources to use other datasources

create a null data source for testing. We can choose to document it later if we want to, but for now it's a convenience for us

add a test to catch cyclic datasource dependencies, update tests to include out-of-order data sources, and update the code to clean up the diagnostics returned from the recursive evaluation

PR review comments
pull/11248/head
Megan Marsh 5 years ago
parent 9cab184006
commit 48de1fc7da

@ -17,6 +17,7 @@ import (
nullbuilder "github.com/hashicorp/packer/builder/null"
hcppackerimagedatasource "github.com/hashicorp/packer/datasource/hcp-packer-image"
hcppackeriterationdatasource "github.com/hashicorp/packer/datasource/hcp-packer-iteration"
nulldatasource "github.com/hashicorp/packer/datasource/null"
packerimageiterationdatasource "github.com/hashicorp/packer/datasource/packer-image-iteration"
artificepostprocessor "github.com/hashicorp/packer/post-processor/artifice"
checksumpostprocessor "github.com/hashicorp/packer/post-processor/checksum"
@ -64,6 +65,7 @@ var PostProcessors = map[string]packersdk.PostProcessor{
// Datasources maps each data source type name usable in an HCL template onto
// its implementation. Entries are listed in alphabetical order by type name.
var Datasources = map[string]packersdk.Datasource{
	"hcp-packer-image":       new(hcppackerimagedatasource.Datasource),
	"hcp-packer-iteration":   new(hcppackeriterationdatasource.Datasource),
	"null":                   new(nulldatasource.Datasource),
	"packer-image-iteration": new(packerimageiterationdatasource.Datasource),
}

@ -111,6 +111,7 @@ func (d *Datasource) Execute() (cty.Value, error) {
if err != nil {
return cty.NullVal(cty.EmptyObject), err
}
// Load channel.
log.Printf("[INFO] Reading info from HCP Packer registry (%s) [project_id=%s, organization_id=%s, iteration_id=%s]",
d.config.Bucket, cli.ProjectID, cli.OrganizationID, d.config.IterationID)
@ -124,20 +125,21 @@ func (d *Datasource) Execute() (cty.Value, error) {
output := DatasourceOutput{}
for _, build := range iteration.Builds {
if build.CloudProvider == d.config.CloudProvider {
for _, image := range build.Images {
if image.Region == d.config.Region {
// This is the desired image.
output = DatasourceOutput{
CloudProvider: build.CloudProvider,
ComponentType: build.ComponentType,
CreatedAt: image.CreatedAt.String(),
BuildID: build.ID,
IterationID: build.IterationID,
PackerRunUUID: build.PackerRunUUID,
ID: image.ImageID,
Region: image.Region,
}
if build.CloudProvider != d.config.CloudProvider {
continue
}
for _, image := range build.Images {
if image.Region == d.config.Region {
// This is the desired image.
output = DatasourceOutput{
CloudProvider: build.CloudProvider,
ComponentType: build.ComponentType,
CreatedAt: image.CreatedAt.String(),
BuildID: build.ID,
IterationID: build.IterationID,
PackerRunUUID: build.PackerRunUUID,
ID: image.ImageID,
Region: image.Region,
}
}
}

@ -0,0 +1,68 @@
//go:generate packer-sdc struct-markdown
//go:generate packer-sdc mapstructure-to-hcl2 -type DatasourceOutput,Config
package null
import (
"fmt"
"github.com/zclconf/go-cty/cty"
"github.com/hashicorp/hcl/v2/hcldec"
"github.com/hashicorp/packer-plugin-sdk/common"
"github.com/hashicorp/packer-plugin-sdk/hcl2helper"
packersdk "github.com/hashicorp/packer-plugin-sdk/packer"
"github.com/hashicorp/packer-plugin-sdk/template/config"
)
// Datasource is the "null" data source: it simply echoes its configured
// input string back as its output. It exists to demonstrate how data sources
// work and to serve as a test plugin.
type Datasource struct {
	// config is populated by Configure from the HCL block's attributes.
	config Config
}

// The Null data source is designed to demonstrate how data sources work, and
// to provide a test plugin. It does not do anything useful; you assign an
// input string and it gets returned as an output string.
type Config struct {
	common.PackerConfig `mapstructure:",squash"`
	// This variable will get stored as "output" in the output spec.
	Input string `mapstructure:"input" required:"true"`
}
// ConfigSpec returns the HCL object spec used to decode this data source's
// configuration block; it is derived from the generated flat Config type.
func (d *Datasource) ConfigSpec() hcldec.ObjectSpec {
	flat := d.config.FlatMapstructure()
	return flat.HCL2Spec()
}
// Configure decodes the raw HCL values into d.config and validates that the
// required `input` attribute was supplied. It returns a MultiError listing
// every validation failure, or nil on success.
func (d *Datasource) Configure(raws ...interface{}) error {
	if err := config.Decode(&d.config, nil, raws...); err != nil {
		return err
	}

	var merr *packersdk.MultiError
	if d.config.Input == "" {
		merr = packersdk.MultiErrorAppend(merr, fmt.Errorf("The `input` must be specified"))
	}
	if merr != nil && len(merr.Errors) > 0 {
		return merr
	}
	return nil
}
// DatasourceOutput describes the value produced by Execute. Its field
// comments are consumed by packer-sdc struct-markdown when generating the
// documentation partials.
type DatasourceOutput struct {
	// Output will return the input variable, as output.
	Output string `mapstructure:"output"`
}
// OutputSpec returns the HCL object spec describing the shape of the value
// that Execute produces, derived from the generated flat output type.
func (d *Datasource) OutputSpec() hcldec.ObjectSpec {
	out := &DatasourceOutput{}
	return out.FlatMapstructure().HCL2Spec()
}
// Execute passes the configured input string straight through as the output
// value, encoded as a cty object matching OutputSpec. It never fails.
func (d *Datasource) Execute() (cty.Value, error) {
	out := DatasourceOutput{Output: d.config.Input}
	return hcl2helper.HCL2ValueFromConfig(out, d.OutputSpec()), nil
}

@ -0,0 +1,70 @@
// Code generated by "packer-sdc mapstructure-to-hcl2"; DO NOT EDIT.
package null
import (
"github.com/hashicorp/hcl/v2/hcldec"
"github.com/zclconf/go-cty/cty"
)
// FlatConfig is an auto-generated flat version of Config.
// Where the contents of a field with a `mapstructure:,squash` tag are bubbled up.
//
// NOTE(review): this type is generated by packer-sdc mapstructure-to-hcl2;
// change Config in data.go and re-run go generate rather than editing here.
type FlatConfig struct {
	PackerBuildName     *string           `mapstructure:"packer_build_name" cty:"packer_build_name" hcl:"packer_build_name"`
	PackerBuilderType   *string           `mapstructure:"packer_builder_type" cty:"packer_builder_type" hcl:"packer_builder_type"`
	PackerCoreVersion   *string           `mapstructure:"packer_core_version" cty:"packer_core_version" hcl:"packer_core_version"`
	PackerDebug         *bool             `mapstructure:"packer_debug" cty:"packer_debug" hcl:"packer_debug"`
	PackerForce         *bool             `mapstructure:"packer_force" cty:"packer_force" hcl:"packer_force"`
	PackerOnError       *string           `mapstructure:"packer_on_error" cty:"packer_on_error" hcl:"packer_on_error"`
	PackerUserVars      map[string]string `mapstructure:"packer_user_variables" cty:"packer_user_variables" hcl:"packer_user_variables"`
	PackerSensitiveVars []string          `mapstructure:"packer_sensitive_variables" cty:"packer_sensitive_variables" hcl:"packer_sensitive_variables"`
	Input               *string           `mapstructure:"input" required:"true" cty:"input" hcl:"input"`
}

// FlatMapstructure returns a new FlatConfig.
// FlatConfig is an auto-generated flat version of Config.
// Where the contents a fields with a `mapstructure:,squash` tag are bubbled up.
func (*Config) FlatMapstructure() interface{ HCL2Spec() map[string]hcldec.Spec } {
	return new(FlatConfig)
}
// HCL2Spec returns the hcl spec of a Config.
// This spec is used by HCL to read the fields of Config.
// The decoded values from this spec will then be applied to a FlatConfig.
func (*FlatConfig) HCL2Spec() map[string]hcldec.Spec {
	s := map[string]hcldec.Spec{
		"packer_build_name":          &hcldec.AttrSpec{Name: "packer_build_name", Type: cty.String, Required: false},
		"packer_builder_type":        &hcldec.AttrSpec{Name: "packer_builder_type", Type: cty.String, Required: false},
		"packer_core_version":        &hcldec.AttrSpec{Name: "packer_core_version", Type: cty.String, Required: false},
		"packer_debug":               &hcldec.AttrSpec{Name: "packer_debug", Type: cty.Bool, Required: false},
		"packer_force":               &hcldec.AttrSpec{Name: "packer_force", Type: cty.Bool, Required: false},
		"packer_on_error":            &hcldec.AttrSpec{Name: "packer_on_error", Type: cty.String, Required: false},
		"packer_user_variables":      &hcldec.AttrSpec{Name: "packer_user_variables", Type: cty.Map(cty.String), Required: false},
		"packer_sensitive_variables": &hcldec.AttrSpec{Name: "packer_sensitive_variables", Type: cty.List(cty.String), Required: false},
		// "input" is Required:false at the HCL-decode layer even though the
		// field is tagged required:"true"; requiredness is enforced later by
		// Datasource.Configure, not by this spec.
		"input": &hcldec.AttrSpec{Name: "input", Type: cty.String, Required: false},
	}
	return s
}
// FlatDatasourceOutput is an auto-generated flat version of DatasourceOutput.
// Where the contents of a field with a `mapstructure:,squash` tag are bubbled up.
//
// NOTE(review): generated by packer-sdc; edit DatasourceOutput in data.go and
// re-run go generate rather than editing here.
type FlatDatasourceOutput struct {
	Output *string `mapstructure:"output" cty:"output" hcl:"output"`
}

// FlatMapstructure returns a new FlatDatasourceOutput.
// FlatDatasourceOutput is an auto-generated flat version of DatasourceOutput.
// Where the contents a fields with a `mapstructure:,squash` tag are bubbled up.
func (*DatasourceOutput) FlatMapstructure() interface{ HCL2Spec() map[string]hcldec.Spec } {
	return new(FlatDatasourceOutput)
}
// HCL2Spec returns the hcl spec of a DatasourceOutput.
// This spec is used by HCL to read the fields of DatasourceOutput.
// The decoded values from this spec will then be applied to a FlatDatasourceOutput.
func (*FlatDatasourceOutput) HCL2Spec() map[string]hcldec.Spec {
	// Single "output" string attribute, mirroring DatasourceOutput.Output.
	s := map[string]hcldec.Spec{
		"output": &hcldec.AttrSpec{Name: "output", Type: cty.String, Required: false},
	}
	return s
}

@ -12,6 +12,7 @@ import (
packersdk "github.com/hashicorp/packer-plugin-sdk/packer"
"github.com/hashicorp/packer-plugin-sdk/template/config"
"github.com/hashicorp/packer/builder/null"
dnull "github.com/hashicorp/packer/datasource/null"
. "github.com/hashicorp/packer/hcl2template/internal"
packerregistry "github.com/hashicorp/packer/internal/packer_registry"
"github.com/hashicorp/packer/packer"
@ -41,6 +42,7 @@ func getBasicParser(opts ...getParserOption) *Parser {
},
DataSources: packer.MapOfDatasource{
"amazon-ami": func() (packersdk.Datasource, error) { return &MockDatasource{}, nil },
"null": func() (packersdk.Datasource, error) { return &dnull.Datasource{}, nil },
},
},
}

@ -0,0 +1,6 @@
# Test fixture: two null data sources that each interpolate the other's
# output, forming a dependency cycle. Used to verify that cyclic data source
# dependencies are detected and reported instead of recursing forever.
data "null" "gummy" {
  input = "${data.null.bear.output}"
}

data "null" "bear" {
  input = "${data.null.gummy.output}"
}

@ -0,0 +1,19 @@
# Test fixture: a chain of null data sources declared out of order, where
# some inputs interpolate other data sources' outputs. Exercises the
# recursive/secondary evaluation path: foo and bar are independent, baz
# depends on foo and bar, bang depends on baz, and yummy depends on bang.
data "null" "foo" {
  input = "chocolate"
}

data "null" "yummy" {
  input = "${data.null.bang.output}-and-sprinkles"
}

data "null" "bar" {
  input = "vanilla"
}

data "null" "baz" {
  input = "${data.null.foo.output}-${data.null.bar.output}-swirl"
}

data "null" "bang" {
  input = "${data.null.baz.output}-with-marshmallows"
}

@ -2,6 +2,7 @@ package hcl2template
import (
"fmt"
"strings"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclsyntax"
@ -62,7 +63,7 @@ func (ds *Datasources) Values() (map[string]cty.Value, hcl.Diagnostics) {
return res, diags
}
func (cfg *PackerConfig) startDatasource(dataSourceStore packer.DatasourceStore, ref DatasourceRef) (packersdk.Datasource, hcl.Diagnostics) {
func (cfg *PackerConfig) startDatasource(dataSourceStore packer.DatasourceStore, ref DatasourceRef, secondaryEvaluation bool) (packersdk.Datasource, hcl.Diagnostics) {
var diags hcl.Diagnostics
block := cfg.Datasources[ref].block
@ -101,19 +102,48 @@ func (cfg *PackerConfig) startDatasource(dataSourceStore packer.DatasourceStore,
Severity: hcl.DiagError,
})
}
// HACK:
// This is where we parse the variables being used in the data sources.
// By passing in the DatasourceContext variable, we tell the EvalContext
// that since this is a datasource being evaluated, we should not allow
// other data sources to be decoded into it. When secondaryEvaluation is
// true, we know that this data source needs another data source in order
// to be evaluated. So we instead retrieve a different EvalContext.
// This is a brute force method to enable data sources to depend on each
// other, and a more elegant solution will be available once we implement a
// true DAG for Packer.
var decoded cty.Value
var moreDiags hcl.Diagnostics
body := block.Body
decoded, moreDiags := decodeHCL2Spec(body, cfg.EvalContext(DatasourceContext, nil), datasource)
if secondaryEvaluation {
// LocalContext is a lie! See above.
decoded, moreDiags = decodeHCL2Spec(body, cfg.EvalContext(LocalContext, nil), datasource)
} else {
decoded, moreDiags = decodeHCL2Spec(body, cfg.EvalContext(DatasourceContext, nil), datasource)
}
diags = append(diags, moreDiags...)
if moreDiags.HasErrors() {
return nil, diags
for _, err = range moreDiags.Errs() {
// If the error is just that there's no "data" object in the
// context, don't fail. We will track this data source for decoding
// again later, once we've evaluated all of the datasources.
// return nil, diags
if !strings.Contains(err.Error(), `There is no variable named "data"`) {
// There's an error that isn't just a recursive data source
// interpolation error
return nil, diags
}
}
}
// In case of cty.Unknown values, this will write a equivalent placeholder of the same type
// Unknown types are not recognized by the json marshal during the RPC call and we have to do this here
// to avoid json parsing failures when running the validate command.
// We don't do this before so we can validate if variable types matches correctly on decodeHCL2Spec.
// In case of cty.Unknown values, this will write a equivalent placeholder
// of the same type. Unknown types are not recognized by the json marshal
// during the RPC call and we have to do this here to avoid json parsing
// failures when running the validate command. We don't do this before so
// we can validate if variable type matches correctly on decodeHCL2Spec.
decoded = hcl2shim.WriteUnknownPlaceholderValues(decoded)
if err := datasource.Configure(decoded); err != nil {
diags = append(diags, &hcl.Diagnostic{
Summary: err.Error(),

@ -31,6 +31,54 @@ func TestParse_datasource(t *testing.T) {
[]packersdk.Build{},
false,
},
{"recursive datasources",
defaultParser,
parseTestArgs{"testdata/datasources/recursive.pkr.hcl", nil, nil},
&PackerConfig{
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "datasources"),
Datasources: Datasources{
{
Type: "null",
Name: "foo",
}: {
Type: "null",
Name: "foo",
},
{
Type: "null",
Name: "bar",
}: {
Type: "null",
Name: "bar",
},
{
Type: "null",
Name: "baz",
}: {
Type: "null",
Name: "baz",
},
{
Type: "null",
Name: "bang",
}: {
Type: "null",
Name: "bang",
},
{
Type: "null",
Name: "yummy",
}: {
Type: "null",
Name: "yummy",
},
},
},
false, false,
[]packersdk.Build{},
false,
},
{"untyped datasource",
defaultParser,
parseTestArgs{"testdata/datasources/untyped.pkr.hcl", nil, nil},
@ -53,26 +101,19 @@ func TestParse_datasource(t *testing.T) {
nil,
false,
},
{"not allowed usage of data source within another data source",
{"inexistent source",
defaultParser,
parseTestArgs{"testdata/datasources/not-allowed.pkr.hcl", nil, nil},
parseTestArgs{"testdata/datasources/inexistent.pkr.hcl", nil, nil},
&PackerConfig{
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "datasources"),
Datasources: Datasources{
{
Type: "amazon-ami",
Name: "test_0",
}: {
Type: "amazon-ami",
Name: "test_0",
},
{
Type: "amazon-ami",
Name: "test_1",
Type: "inexistant",
Name: "test",
}: {
Type: "amazon-ami",
Name: "test_1",
Type: "inexistant",
Name: "test",
},
},
},
@ -80,18 +121,18 @@ func TestParse_datasource(t *testing.T) {
nil,
false,
},
{"inexistent source",
{"duplicate source",
defaultParser,
parseTestArgs{"testdata/datasources/inexistent.pkr.hcl", nil, nil},
parseTestArgs{"testdata/datasources/duplicate.pkr.hcl", nil, nil},
&PackerConfig{
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "datasources"),
Datasources: Datasources{
{
Type: "inexistant",
Type: "amazon-ami",
Name: "test",
}: {
Type: "inexistant",
Type: "amazon-ami",
Name: "test",
},
},
@ -100,19 +141,26 @@ func TestParse_datasource(t *testing.T) {
nil,
false,
},
{"duplicate source",
{"cyclic dependency between data sources",
defaultParser,
parseTestArgs{"testdata/datasources/duplicate.pkr.hcl", nil, nil},
parseTestArgs{"testdata/datasources/dependency_cycle.pkr.hcl", nil, nil},
&PackerConfig{
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "datasources"),
Datasources: Datasources{
{
Type: "amazon-ami",
Name: "test",
Type: "null",
Name: "gummy",
}: {
Type: "amazon-ami",
Name: "test",
Type: "null",
Name: "gummy",
},
{
Type: "null",
Name: "bear",
}: {
Type: "null",
Name: "bear",
},
},
},
@ -121,5 +169,6 @@ func TestParse_datasource(t *testing.T) {
false,
},
}
testParse(t, tests)
}

@ -2,6 +2,7 @@ package hcl2template
import (
"fmt"
"log"
"sort"
"strings"
@ -128,7 +129,7 @@ func (cfg *PackerConfig) EvalContext(ctx BlockContext, variables map[string]cty.
// dependency tree, so that any block can use any block whatever the
// order.
switch ctx {
case LocalContext, BuildContext:
case LocalContext, BuildContext: // todo: refine
datasourceVariables, _ := cfg.Datasources.Values()
ectx.Variables[dataAccessor] = cty.ObjectVal(datasourceVariables)
}
@ -284,12 +285,52 @@ func (c *PackerConfig) evaluateLocalVariable(local *LocalBlock) hcl.Diagnostics
func (cfg *PackerConfig) evaluateDatasources(skipExecution bool) hcl.Diagnostics {
var diags hcl.Diagnostics
dependencies := map[DatasourceRef][]DatasourceRef{}
for ref, ds := range cfg.Datasources {
if ds.value != (cty.Value{}) {
continue
}
// Pre-examine body of this data source to see if it uses another data
// source in any of its input expressions. If so, skip evaluating it for
// now, and add it to a list of datasources to evaluate again, later,
// with the datasources in its context.
// This is essentially creating a very primitive DAG just for data
// source interdependencies.
block := ds.block
body := block.Body
attrs, _ := body.JustAttributes()
skipFirstEval := false
for _, attr := range attrs {
vars := attr.Expr.Variables()
for _, v := range vars {
// check whether the variable is a data source
if v.RootName() == "data" {
// construct, backwards, the data source type and name we
// need to evaluate before this one can be evaluated.
dependsOn := DatasourceRef{
Type: v[1].(hcl.TraverseAttr).Name,
Name: v[2].(hcl.TraverseAttr).Name,
}
log.Printf("The data source %#v depends on datasource %#v", ref, dependsOn)
if dependencies[ref] != nil {
dependencies[ref] = append(dependencies[ref], dependsOn)
} else {
dependencies[ref] = []DatasourceRef{dependsOn}
}
skipFirstEval = true
}
}
}
datasource, startDiags := cfg.startDatasource(cfg.parser.PluginConfig.DataSources, ref)
// Now we have a list of data sources that depend on other data sources.
// Don't evaluate these; only evaluate data sources that we didn't
// mark as having dependencies.
if skipFirstEval {
continue
}
datasource, startDiags := cfg.startDatasource(cfg.parser.PluginConfig.DataSources, ref, false)
diags = append(diags, startDiags...)
if diags.HasErrors() {
continue
@ -311,13 +352,92 @@ func (cfg *PackerConfig) evaluateDatasources(skipExecution bool) hcl.Diagnostics
})
continue
}
ds.value = realValue
cfg.Datasources[ref] = ds
}
// Now that most of our data sources have been started and executed, we can
// try to execute the ones that depend on other data sources.
for ref := range dependencies {
_, moreDiags, _ := cfg.recursivelyEvaluateDatasources(ref, dependencies, skipExecution, 0)
// Deduplicate diagnostics to prevent recursion messes.
cleanedDiags := map[string]*hcl.Diagnostic{}
for _, diag := range moreDiags {
cleanedDiags[diag.Summary] = diag
}
for _, diag := range cleanedDiags {
diags = append(diags, diag)
}
}
return diags
}
func (cfg *PackerConfig) recursivelyEvaluateDatasources(ref DatasourceRef, dependencies map[DatasourceRef][]DatasourceRef, skipExecution bool, depth int) (map[DatasourceRef][]DatasourceRef, hcl.Diagnostics, bool) {
var diags hcl.Diagnostics
var moreDiags hcl.Diagnostics
shouldContinue := true
if depth > 10 {
// Add a comment about recursion.
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Max datasource recursion depth exceeded.",
Detail: "An error occured while recursively evaluating data " +
"sources. Either your data source depends on more than ten " +
"other data sources, or your data sources have a cyclic " +
"dependency. Please simplify your config to continue. ",
})
return dependencies, diags, false
}
ds := cfg.Datasources[ref]
// Make sure everything ref depends on has already been evaluated.
for _, dep := range dependencies[ref] {
if _, ok := dependencies[dep]; ok {
depth += 1
// If this dependency is not in the map, it means we've already
// launched and executed this datasource. Otherwise, it means
// we still need to run it. RECURSION TIME!!
dependencies, moreDiags, shouldContinue = cfg.recursivelyEvaluateDatasources(dep, dependencies, skipExecution, depth)
diags = append(diags, moreDiags...)
}
}
// If we've gotten here, then it means ref doesn't seem to have any further
// dependencies we need to evaluate first. Evaluate it, with the cfg's full
// data source context.
datasource, startDiags := cfg.startDatasource(cfg.parser.PluginConfig.DataSources, ref, true)
if startDiags.HasErrors() {
diags = append(diags, startDiags...)
return dependencies, diags, shouldContinue
}
if skipExecution {
placeholderValue := cty.UnknownVal(hcldec.ImpliedType(datasource.OutputSpec()))
ds.value = placeholderValue
cfg.Datasources[ref] = ds
return dependencies, diags, shouldContinue
}
realValue, err := datasource.Execute()
if err != nil {
diags = append(diags, &hcl.Diagnostic{
Summary: err.Error(),
Subject: &cfg.Datasources[ref].block.DefRange,
Severity: hcl.DiagError,
})
return dependencies, diags, shouldContinue
}
ds.value = realValue
cfg.Datasources[ref] = ds
// remove ref from the dependencies map.
delete(dependencies, ref)
return dependencies, diags, shouldContinue
}
// getCoreBuildProvisioners takes a list of provisioner block, starts according
// provisioners and sends parsed HCL2 over to it.
func (cfg *PackerConfig) getCoreBuildProvisioners(source SourceUseBlock, blocks []*ProvisionerBlock, ectx *hcl.EvalContext) ([]packer.CoreBuildProvisioner, hcl.Diagnostics) {

@ -0,0 +1,5 @@
<!-- Code generated from the comments of the Config struct in datasource/null/data.go; DO NOT EDIT MANUALLY -->
- `input` (string) - This variable will get stored as "output" in the output spec.
<!-- End of code generated from the comments of the Config struct in datasource/null/data.go; -->

@ -0,0 +1,7 @@
<!-- Code generated from the comments of the Config struct in datasource/null/data.go; DO NOT EDIT MANUALLY -->
The Null data source is designed to demonstrate how data sources work, and
to provide a test plugin. It does not do anything useful; you assign an
input string and it gets returned as an output string.
<!-- End of code generated from the comments of the Config struct in datasource/null/data.go; -->

@ -0,0 +1,5 @@
<!-- Code generated from the comments of the DatasourceOutput struct in datasource/null/data.go; DO NOT EDIT MANUALLY -->
- `output` (string) - Output will return the input variable, as output.
<!-- End of code generated from the comments of the DatasourceOutput struct in datasource/null/data.go; -->
Loading…
Cancel
Save