Recognize tf query files (#36929)

pull/37043/head
Samsondeen 11 months ago committed by GitHub
parent f7cb9097da
commit 44eb0c69ef
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

@ -29,6 +29,8 @@ func (r Resource) String() string {
return fmt.Sprintf("data.%s.%s", r.Type, r.Name)
case EphemeralResourceMode:
return fmt.Sprintf("ephemeral.%s.%s", r.Type, r.Name)
case ListResourceMode:
return fmt.Sprintf("list.%s.%s", r.Type, r.Name)
default:
// Should never happen, but we'll return a string here rather than
// crashing just in case it does.
@ -511,6 +513,10 @@ const (
// EphemeralResourceMode indicates an ephemeral resource, as defined by
// "ephemeral" blocks in configuration.
EphemeralResourceMode ResourceMode = 'E'
// ListResourceMode indicates a list resource, as defined by
// "list" blocks in tfquery configuration.
ListResourceMode ResourceMode = 'L'
)
// AbsResourceInstanceObject represents one of the specific remote objects

@ -12,16 +12,18 @@ func _() {
_ = x[ManagedResourceMode-77]
_ = x[DataResourceMode-68]
_ = x[EphemeralResourceMode-69]
_ = x[ListResourceMode-76]
}
const (
_ResourceMode_name_0 = "InvalidResourceMode"
_ResourceMode_name_1 = "DataResourceModeEphemeralResourceMode"
_ResourceMode_name_2 = "ManagedResourceMode"
_ResourceMode_name_2 = "ListResourceModeManagedResourceMode"
)
var (
_ResourceMode_index_1 = [...]uint8{0, 16, 37}
_ResourceMode_index_2 = [...]uint8{0, 16, 35}
)
func (i ResourceMode) String() string {
@ -31,8 +33,9 @@ func (i ResourceMode) String() string {
case 68 <= i && i <= 69:
i -= 68
return _ResourceMode_name_1[_ResourceMode_index_1[i]:_ResourceMode_index_1[i+1]]
case i == 77:
return _ResourceMode_name_2
case 76 <= i && i <= 77:
i -= 76
return _ResourceMode_name_2[_ResourceMode_index_2[i]:_ResourceMode_index_2[i+1]]
default:
return "ResourceMode(" + strconv.FormatInt(int64(i), 10) + ")"
}

@ -22,8 +22,8 @@ import (
//
// LoadConfig performs the basic syntax and uniqueness validations that are
// required to process the individual modules
func (l *Loader) LoadConfig(rootDir string) (*configs.Config, hcl.Diagnostics) {
return l.loadConfig(l.parser.LoadConfigDir(rootDir))
func (l *Loader) LoadConfig(rootDir string, parserOpts ...configs.Option) (*configs.Config, hcl.Diagnostics) {
return l.loadConfig(l.parser.LoadConfigDir(rootDir, parserOpts...))
}
// LoadConfigWithTests matches LoadConfig, except the configs.Config contains

@ -49,6 +49,7 @@ type Module struct {
ManagedResources map[string]*Resource
DataResources map[string]*Resource
EphemeralResources map[string]*Resource
ListResources map[string]*Resource
Actions map[string]*Action
Moved []*Moved
@ -130,6 +131,7 @@ func NewModule(primaryFiles, overrideFiles []*File) (*Module, hcl.Diagnostics) {
ManagedResources: map[string]*Resource{},
EphemeralResources: map[string]*Resource{},
DataResources: map[string]*Resource{},
ListResources: map[string]*Resource{},
Checks: map[string]*Check{},
ProviderMetas: map[addrs.Provider]*ProviderMeta{},
Tests: map[string]*TestFile{},
@ -200,6 +202,8 @@ func (m *Module) ResourceByAddr(addr addrs.Resource) *Resource {
return m.DataResources[key]
case addrs.EphemeralResourceMode:
return m.EphemeralResources[key]
case addrs.ListResourceMode:
return m.ListResources[key]
default:
return nil
}
@ -526,6 +530,75 @@ func (m *Module) appendFile(file *File) hcl.Diagnostics {
return diags
}
// appendQueryFile merges the contents of a parsed .tfquery file into the
// receiving module, returning error diagnostics for any declarations that
// collide with ones the module already holds.
//
// Conflict behavior differs by declaration type: provider configurations and
// list resources keep the first declaration (the duplicate is skipped via
// continue), while variables and locals record a diagnostic but are then
// overwritten by the later declaration.
func (m *Module) appendQueryFile(file *QueryFile) hcl.Diagnostics {
	var diags hcl.Diagnostics

	for _, pc := range file.ProviderConfigs {
		key := pc.moduleUniqueKey()
		if existing, exists := m.ProviderConfigs[key]; exists {
			if existing.Alias == "" {
				// The existing configuration is the default (non-aliased) one.
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Duplicate provider configuration",
					Detail:   fmt.Sprintf("A default (non-aliased) provider configuration for %q was already given at %s. If multiple configurations are required, set the \"alias\" argument for alternative configurations.", existing.Name, existing.DeclRange),
					Subject:  &pc.DeclRange,
				})
			} else {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Duplicate provider configuration",
					Detail:   fmt.Sprintf("A provider configuration for %q with alias %q was already given at %s. Each configuration for the same provider must have a distinct alias.", existing.Name, existing.Alias, existing.DeclRange),
					Subject:  &pc.DeclRange,
				})
			}
			// Keep the first declaration; do not overwrite it.
			continue
		}
		m.ProviderConfigs[key] = pc
	}

	for _, v := range file.Variables {
		if existing, exists := m.Variables[v.Name]; exists {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Duplicate variable declaration",
				Detail:   fmt.Sprintf("A variable named %q was already declared at %s. Variable names must be unique within a module.", existing.Name, existing.DeclRange),
				Subject:  &v.DeclRange,
			})
		}
		// NOTE(review): the later declaration wins here even when a duplicate
		// diagnostic was recorded above — presumably intentional to match the
		// regular appendFile semantics; confirm.
		m.Variables[v.Name] = v
	}

	for _, l := range file.Locals {
		if existing, exists := m.Locals[l.Name]; exists {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Duplicate local value definition",
				Detail:   fmt.Sprintf("A local value named %q was already defined at %s. Local value names must be unique within a module.", existing.Name, existing.DeclRange),
				Subject:  &l.DeclRange,
			})
		}
		// Later declaration overwrites, same as variables above.
		m.Locals[l.Name] = l
	}

	for _, ql := range file.ListResources {
		key := ql.moduleUniqueKey()
		if existing, exists := m.ListResources[key]; exists {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  fmt.Sprintf("Duplicate list %q configuration", existing.Type),
				Detail:   fmt.Sprintf("A %s list named %q was already declared at %s. List names must be unique per type in each module.", existing.Type, existing.Name, existing.DeclRange),
				Subject:  &ql.DeclRange,
			})
			// Keep the first declaration; do not overwrite it.
			continue
		}
		// set the provider FQN for the resource
		m.ListResources[key] = ql
		ql.Provider = m.ProviderForLocalConfig(ql.ProviderConfigAddr())
	}

	return diags
}
func (m *Module) mergeFile(file *File) hcl.Diagnostics {
var diags hcl.Diagnostics

@ -48,6 +48,17 @@ func (p *Parser) LoadTestFile(path string) (*TestFile, hcl.Diagnostics) {
return test, diags
}
// LoadQueryFile reads the file at the given path and parses it as a
// Terraform query file, returning the parsed file together with any
// diagnostics produced while loading or decoding it.
func (p *Parser) LoadQueryFile(path string) (*QueryFile, hcl.Diagnostics) {
	body, diags := p.LoadHCLFile(path)
	if body == nil {
		return nil, diags
	}

	query, queryDiags := loadQueryFile(body)
	return query, append(diags, queryDiags...)
}
// LoadMockDataFile reads the file at the given path and parses it as a
// Terraform mock data file.
//

@ -6,7 +6,6 @@ package configs
import (
"fmt"
"os"
"path"
"path/filepath"
"strings"
@ -17,10 +16,19 @@ const (
DefaultTestDirectory = "tests"
)
// LoadConfigDir reads the .tf and .tf.json files in the given directory
// LoadConfigDir reads the configuration files in the given directory
// as config files (using LoadConfigFile) and then combines these files into
// a single Module.
//
// Main terraform configuration files (.tf and .tf.json) are loaded as the primary
// module, while override files (override.tf and *_override.tf) are loaded as
// overrides.
// Optionally, test files (.tftest.hcl and .tftest.json) can be loaded from
// a subdirectory of the given directory, which is specified by the
// MatchTestFiles option, or from the default test directory.
// If this option is not specified, test files will not be loaded.
// Query files (.tfquery.hcl) are also loaded from the given directory.
//
// If this method returns nil, that indicates that the given directory does not
// exist at all or could not be opened for some reason. Callers may wish to
// detect this case and ignore the returned diagnostics so that they can
@ -36,21 +44,45 @@ const (
//
// .tf files are parsed using the HCL native syntax while .tf.json files are
// parsed using the HCL JSON syntax.
func (p *Parser) LoadConfigDir(path string) (*Module, hcl.Diagnostics) {
primaryPaths, overridePaths, _, diags := p.dirFiles(path, "")
// LoadConfigDir reads the configuration files in the given directory as
// config files (using LoadConfigFile) and combines them into a single
// Module, honoring any parser options supplied (e.g. MatchTestFiles to also
// discover test files). Query files are attached only when the directory
// scan found any.
//
// Returns a nil Module when the directory scan itself fails; callers must
// check for nil before using the module.
func (p *Parser) LoadConfigDir(path string, opts ...Option) (*Module, hcl.Diagnostics) {
	fileSet, diags := p.dirFileSet(path, opts...)
	if diags.HasErrors() {
		return nil, diags
	}

	// Load the primary and override .tf configuration files.
	primary, fDiags := p.loadFiles(fileSet.Primary, false)
	diags = diags.Extend(fDiags)
	override, fDiags := p.loadFiles(fileSet.Override, true)
	diags = diags.Extend(fDiags)

	// Initialize the module from the loaded files.
	mod, modDiags := NewModule(primary, override)
	diags = diags.Extend(modDiags)

	// Attach any test files that were discovered.
	if len(fileSet.Tests) > 0 {
		testFiles, fDiags := p.loadTestFiles(path, fileSet.Tests)
		diags = diags.Extend(fDiags)
		if mod != nil {
			mod.Tests = testFiles
		}
	}

	// Attach any query files that were discovered.
	if len(fileSet.Queries) > 0 {
		queryFiles, fDiags := p.loadQueryFiles(path, fileSet.Queries)
		// Use Extend here for consistency with the rest of this function
		// (the original mixed append(diags, ...) with diags.Extend(...)).
		diags = diags.Extend(fDiags)
		if mod != nil {
			for _, qf := range queryFiles {
				diags = diags.Extend(mod.appendQueryFile(qf))
			}
		}
	}

	if mod != nil {
		mod.SourceDir = path
	}

	return mod, diags
}
@ -58,24 +90,7 @@ func (p *Parser) LoadConfigDir(path string) (*Module, hcl.Diagnostics) {
// LoadConfigDirWithTests matches LoadConfigDir, but the return Module also
// contains any relevant .tftest.hcl files.
func (p *Parser) LoadConfigDirWithTests(path string, testDirectory string) (*Module, hcl.Diagnostics) {
primaryPaths, overridePaths, testPaths, diags := p.dirFiles(path, testDirectory)
if diags.HasErrors() {
return nil, diags
}
primary, fDiags := p.loadFiles(primaryPaths, false)
diags = append(diags, fDiags...)
override, fDiags := p.loadFiles(overridePaths, true)
diags = append(diags, fDiags...)
tests, fDiags := p.loadTestFiles(path, testPaths)
diags = append(diags, fDiags...)
mod, modDiags := NewModuleWithTests(primary, override, tests)
diags = append(diags, modDiags...)
mod.SourceDir = path
return mod, diags
return p.LoadConfigDir(path, MatchTestFiles(testDirectory))
}
func (p *Parser) LoadMockDataDir(dir string, useForPlanDefault bool, source hcl.Range) (*MockData, hcl.Diagnostics) {
@ -129,24 +144,18 @@ func (p *Parser) LoadMockDataDir(dir string, useForPlanDefault bool, source hcl.
//
// If the given directory does not exist or cannot be read, error diagnostics
// are returned. If errors are returned, the resulting lists may be incomplete.
func (p Parser) ConfigDirFiles(dir string) (primary, override []string, diags hcl.Diagnostics) {
primary, override, _, diags = p.dirFiles(dir, "")
return primary, override, diags
}
// ConfigDirFilesWithTests matches ConfigDirFiles except it also returns the
// paths to any test files within the module.
func (p Parser) ConfigDirFilesWithTests(dir string, testDirectory string) (primary, override, tests []string, diags hcl.Diagnostics) {
return p.dirFiles(dir, testDirectory)
// ConfigDirFiles returns the primary and override configuration file paths
// found in dir, applying any parser options before scanning.
func (p Parser) ConfigDirFiles(dir string, opts ...Option) (primary, override []string, diags hcl.Diagnostics) {
	fileSet, diags := p.dirFileSet(dir, opts...)
	primary, override = fileSet.Primary, fileSet.Override
	return primary, override, diags
}
// IsConfigDir determines whether the given path refers to a directory that
// exists and contains at least one Terraform config file (with a .tf or
// .tf.json extension.). Note, we explicitly exclude checking for tests here
// as tests must live alongside actual .tf config files.
// as tests must live alongside actual .tf config files. Same goes for query files.
// IsConfigDir reports whether path is a directory containing at least one
// primary or override Terraform configuration file.
func (p *Parser) IsConfigDir(path string) bool {
	fileSet, _ := p.dirFileSet(path)
	return len(fileSet.Primary)+len(fileSet.Override) > 0
}
func (p *Parser) loadFiles(paths []string, override bool) ([]*File, hcl.Diagnostics) {
@ -170,109 +179,6 @@ func (p *Parser) loadFiles(paths []string, override bool) ([]*File, hcl.Diagnost
return files, diags
}
// dirFiles finds Terraform configuration files within dir, splitting them into
// primary and override files based on the filename.
//
// If testsDir is not empty, dirFiles will also retrieve Terraform testing files
// both directly within dir and within testsDir as a subdirectory of dir. In
// this way, testsDir acts both as a direction to retrieve test files within the
// main direction and as the location for additional test files.
func (p *Parser) dirFiles(dir string, testsDir string) (primary, override, tests []string, diags hcl.Diagnostics) {
includeTests := len(testsDir) > 0
if includeTests {
testPath := path.Join(dir, testsDir)
infos, err := p.fs.ReadDir(testPath)
if err != nil {
// Then we couldn't read from the testing directory for some reason.
if os.IsNotExist(err) {
// Then this means the testing directory did not exist.
// We won't actually stop loading the rest of the configuration
// for this, we will add a warning to explain to the user why
// test files weren't processed but leave it at that.
if testsDir != DefaultTestDirectory {
// We'll only add the warning if a directory other than the
// default has been requested. If the user is just loading
// the default directory then we have no expectation that
// it should actually exist.
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Test directory does not exist",
Detail: fmt.Sprintf("Requested test directory %s does not exist.", testPath),
})
}
} else {
// Then there is some other reason we couldn't load. We will
// treat this as a full error.
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Failed to read test directory",
Detail: fmt.Sprintf("Test directory %s could not be read: %v.", testPath, err),
})
// We'll also stop loading the rest of the config for this.
return
}
} else {
for _, testInfo := range infos {
if testInfo.IsDir() || IsIgnoredFile(testInfo.Name()) {
continue
}
if strings.HasSuffix(testInfo.Name(), ".tftest.hcl") || strings.HasSuffix(testInfo.Name(), ".tftest.json") {
tests = append(tests, filepath.Join(testPath, testInfo.Name()))
}
}
}
}
infos, err := p.fs.ReadDir(dir)
if err != nil {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Failed to read module directory",
Detail: fmt.Sprintf("Module directory %s does not exist or cannot be read.", dir),
})
return
}
for _, info := range infos {
if info.IsDir() {
// We only care about terraform configuration files.
continue
}
name := info.Name()
ext := fileExt(name)
if ext == "" || IsIgnoredFile(name) {
continue
}
if ext == ".tftest.hcl" || ext == ".tftest.json" {
if includeTests {
tests = append(tests, filepath.Join(dir, name))
}
continue
}
baseName := name[:len(name)-len(ext)] // strip extension
isOverride := baseName == "override" || strings.HasSuffix(baseName, "_override")
fullPath := filepath.Join(dir, name)
if isOverride {
override = append(override, fullPath)
} else {
primary = append(primary, fullPath)
}
}
return
}
func (p *Parser) loadTestFiles(basePath string, paths []string) (map[string]*TestFile, hcl.Diagnostics) {
var diags hcl.Diagnostics
@ -299,6 +205,21 @@ func (p *Parser) loadTestFiles(basePath string, paths []string) (map[string]*Tes
return tfs, diags
}
// loadQueryFiles parses each of the given query file paths, collecting the
// successfully parsed files and accumulating diagnostics across all of them.
func (p *Parser) loadQueryFiles(basePath string, paths []string) ([]*QueryFile, hcl.Diagnostics) {
	var diags hcl.Diagnostics
	files := make([]*QueryFile, 0, len(paths))

	for _, fp := range paths {
		qf, qfDiags := p.LoadQueryFile(fp)
		diags = append(diags, qfDiags...)
		if qf == nil {
			continue
		}
		files = append(files, qf)
	}

	return files, diags
}
// fileExt returns the Terraform configuration extension of the given
// path, or a blank string if it is not a recognized extension.
func fileExt(path string) string {
@ -310,6 +231,10 @@ func fileExt(path string) string {
return ".tftest.hcl"
} else if strings.HasSuffix(path, ".tftest.json") {
return ".tftest.json"
} else if strings.HasSuffix(path, ".tfquery.hcl") {
return ".tfquery.hcl"
} else if strings.HasSuffix(path, ".tfquery.json") {
return ".tfquery.json"
} else {
return ""
}
@ -335,10 +260,10 @@ func IsEmptyDir(path, testDir string) (bool, error) {
}
p := NewParser(nil)
fs, os, tests, diags := p.dirFiles(path, testDir)
fSet, diags := p.dirFileSet(path, MatchTestFiles(testDir))
if diags.HasErrors() {
return false, diags
}
return len(fs) == 0 && len(os) == 0 && len(tests) == 0, nil
return len(fSet.Primary) == 0 && len(fSet.Override) == 0 && len(fSet.Tests) == 0, nil
}

@ -8,6 +8,7 @@ import (
"io/ioutil"
"os"
"path/filepath"
"strings"
"testing"
"github.com/hashicorp/hcl/v2"
@ -133,7 +134,7 @@ func TestParserLoadConfigDirWithTests(t *testing.T) {
}
parser := NewParser(nil)
mod, diags := parser.LoadConfigDirWithTests(directory, testDirectory)
mod, diags := parser.LoadConfigDir(directory, MatchTestFiles(testDirectory))
if len(diags) > 0 { // We don't want any warnings or errors.
t.Errorf("unexpected diagnostics")
for _, diag := range diags {
@ -148,6 +149,82 @@ func TestParserLoadConfigDirWithTests(t *testing.T) {
}
}
// TestParserLoadConfigDirWithQueries verifies that .tfquery.hcl files are
// discovered and merged into the module, that list blocks are gated behind
// the language-experiments flag, and that invalid query files produce the
// expected diagnostics.
func TestParserLoadConfigDirWithQueries(t *testing.T) {
	tests := []struct {
		name             string
		directory        string
		diagnostics      []string
		listResources    int
		managedResources int
		allowExperiments bool
	}{
		{
			name:             "simple",
			directory:        "testdata/query-files/valid/simple",
			listResources:    2,
			allowExperiments: true,
		},
		{
			name:             "mixed",
			directory:        "testdata/query-files/valid/mixed",
			listResources:    2,
			managedResources: 1,
			allowExperiments: true,
		},
		{
			name:             "loading query lists with no-experiments",
			directory:        "testdata/query-files/valid/mixed",
			managedResources: 1,
			listResources:    0,
			allowExperiments: false,
		},
		{
			name:      "no-provider",
			directory: "testdata/query-files/invalid/no-provider",
			diagnostics: []string{
				"testdata/query-files/invalid/no-provider/main.tfquery.hcl:1,1-27: Missing \"provider\" attribute; You must specify a provider attribute when defining a list block.",
			},
			allowExperiments: true,
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			parser := NewParser(nil)
			parser.AllowLanguageExperiments(test.allowExperiments)
			mod, diags := parser.LoadConfigDir(test.directory)

			if len(test.diagnostics) > 0 {
				if !diags.HasErrors() {
					t.Errorf("expected errors, but found none")
				}
				if len(diags) != len(test.diagnostics) {
					t.Fatalf("expected %d errors, but found %d", len(test.diagnostics), len(diags))
				}
				for i, diag := range diags {
					if diag.Error() != test.diagnostics[i] {
						t.Errorf("expected error to be %q, but found %q", test.diagnostics[i], diag.Error())
					}
				}
			} else if len(diags) > 0 { // We don't want any warnings or errors.
				t.Errorf("unexpected diagnostics")
				for _, diag := range diags {
					t.Logf("- %s", diag)
				}
			}

			// LoadConfigDir returns a nil module when the directory scan
			// fails; guard before dereferencing so a failure reports cleanly
			// instead of panicking (the original dereferenced unconditionally).
			if mod == nil {
				t.Fatalf("got nil module")
			}
			if len(mod.ListResources) != test.listResources {
				t.Errorf("incorrect number of list blocks found: %d", len(mod.ListResources))
			}
			if len(mod.ManagedResources) != test.managedResources {
				t.Errorf("incorrect number of managed blocks found: %d", len(mod.ManagedResources))
			}
		})
	}
}
func TestParserLoadTestFiles_Invalid(t *testing.T) {
tcs := map[string][]string{
@ -248,7 +325,7 @@ func TestParserLoadConfigDirWithTests_ReturnsWarnings(t *testing.T) {
t.Errorf("expected summary to be \"Test directory does not exist\" but was \"%s\"", diags[0].Summary)
}
if diags[0].Detail != "Requested test directory testdata/valid-modules/with-tests/not_real does not exist." {
if !strings.HasPrefix(diags[0].Detail, "Requested test directory testdata/valid-modules/with-tests/not_real does not exist.") {
t.Errorf("expected detail to be \"Requested test directory testdata/valid-modules/with-tests/not_real does not exist.\" but was \"%s\"", diags[0].Detail)
}
}
@ -283,7 +360,7 @@ func TestParserLoadConfigDirFailure(t *testing.T) {
parser := NewParser(nil)
path := filepath.Join("testdata/invalid-modules", name)
_, diags := parser.LoadConfigDirWithTests(path, "tests")
_, diags := parser.LoadConfigDir(path, MatchTestFiles("tests"))
if !diags.HasErrors() {
t.Errorf("no errors; want at least one")
for _, diag := range diags {

@ -0,0 +1,240 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package configs
import (
"fmt"
"os"
"path"
"path/filepath"
"strings"
"github.com/hashicorp/hcl/v2"
"github.com/spf13/afero"
)
// ConfigFileSet holds the different types of configuration files found in a directory.
type ConfigFileSet struct {
	Primary  []string // Regular .tf and .tf.json files
	Override []string // Override files (override.tf or *_override.tf)
	Tests    []string // Test files (.tftest.hcl or .tftest.json)
	Queries  []string // Query files (.tfquery.hcl)
}

// FileMatcher is an interface for components that can match and process specific file types
// in a Terraform module directory.
type FileMatcher interface {
	// Matches returns true if the given filename should be processed by this matcher
	Matches(name string) bool

	// DirFiles allows the matcher to process files in a directory
	// only relevant to its type.
	DirFiles(dir string, cfg *parserConfig, fileSet *ConfigFileSet) hcl.Diagnostics
}

// Option is a functional option type for configuring the parser
type Option func(*parserConfig)

// parserConfig carries the directory-scan settings assembled by dirFileSet
// and mutated by Option functions such as MatchTestFiles.
type parserConfig struct {
	// matchers is the ordered set of file matchers; the first matcher that
	// recognizes a filename claims it.
	matchers []FileMatcher
	// testDirectory is the subdirectory scanned for test files.
	testDirectory string
	// fs is the filesystem abstraction used for all directory reads.
	fs afero.Afero
}
// dirFileSet finds Terraform configuration files within directory dir
// and returns a ConfigFileSet containing the found files.
// It uses the given options to determine which types of files to look for
// and how to process them. The returned ConfigFileSet contains the paths
// to the found files, categorized by their type (primary, override, test, query).
func (p *Parser) dirFileSet(dir string, opts ...Option) (ConfigFileSet, hcl.Diagnostics) {
	var diags hcl.Diagnostics
	fileSet := ConfigFileSet{
		Primary:  []string{},
		Override: []string{},
		Tests:    []string{},
		Queries:  []string{},
	}

	// Set up the parser configuration
	cfg := &parserConfig{
		// We always match .tf files
		matchers:      []FileMatcher{&moduleFiles{}},
		testDirectory: DefaultTestDirectory,
		fs:            p.fs,
	}
	// Query files are gated behind the language-experiments flag, so their
	// matcher is only installed when experiments are enabled on this parser.
	if p.AllowsLanguageExperiments() {
		cfg.matchers = append(cfg.matchers, &queryFiles{})
	}
	// Apply caller options (e.g. MatchTestFiles) last so they can extend or
	// override the defaults above.
	for _, opt := range opts {
		opt(cfg)
	}

	// Scan and categorize main directory files
	mainDirDiags := p.rootFiles(dir, cfg.matchers, &fileSet)
	diags = append(diags, mainDirDiags...)
	if diags.HasErrors() {
		return fileSet, diags
	}

	// Process matcher-specific files (e.g. test files in the configured test
	// subdirectory); matchers with no extra directory to scan return nil.
	for _, matcher := range cfg.matchers {
		matcherDiags := matcher.DirFiles(dir, cfg, &fileSet)
		diags = append(diags, matcherDiags...)
	}

	return fileSet, diags
}
// rootFiles scans the main directory for configuration files and categorizes
// them into fileSet using the given file matchers. The first matcher that
// recognizes a filename claims it; directories and ignored files are skipped.
func (p *Parser) rootFiles(dir string, matchers []FileMatcher, fileSet *ConfigFileSet) hcl.Diagnostics {
	var diags hcl.Diagnostics

	// Read main directory files
	infos, err := p.fs.ReadDir(dir)
	if err != nil {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Failed to read module directory",
			Detail:   fmt.Sprintf("Module directory %s does not exist or cannot be read.", dir),
		})
		return diags
	}

	for _, info := range infos {
		if info.IsDir() || IsIgnoredFile(info.Name()) {
			continue
		}

		name := info.Name()
		fullPath := filepath.Join(dir, name)

		// Try each matcher to see if it matches
		for _, matcher := range matchers {
			if !matcher.Matches(name) {
				continue
			}
			// Use fm for the type-switched value: the original declared it as
			// p, shadowing the method receiver *Parser inside the switch.
			switch fm := matcher.(type) {
			case *moduleFiles:
				if fm.isOverride(name) {
					fileSet.Override = append(fileSet.Override, fullPath)
				} else {
					fileSet.Primary = append(fileSet.Primary, fullPath)
				}
			case *testFiles:
				fileSet.Tests = append(fileSet.Tests, fullPath)
			case *queryFiles:
				fileSet.Queries = append(fileSet.Queries, fullPath)
			}
			break // Stop checking other matchers once a match is found
		}
	}

	return diags
}
// MatchTestFiles returns an Option that enables discovery of Terraform test
// files (.tftest.hcl and .tftest.json), using dir as the test subdirectory.
func MatchTestFiles(dir string) Option {
	return func(cfg *parserConfig) {
		cfg.testDirectory = dir
		cfg.matchers = append(cfg.matchers, &testFiles{})
	}
}
// moduleFiles matches regular Terraform configuration files (.tf and .tf.json)
type moduleFiles struct{}

// Matches reports whether name carries one of the primary module extensions.
func (m *moduleFiles) Matches(name string) bool {
	switch fileExt(name) {
	case ".tf", ".tf.json":
		return true
	default:
		return false
	}
}

// isOverride reports whether name is an override file: "override" or a
// "*_override" basename with a module extension.
func (m *moduleFiles) isOverride(name string) bool {
	ext := fileExt(name)
	switch ext {
	case ".tf", ".tf.json":
		// recognized module extension; fall through to basename check
	default:
		return false
	}
	base := strings.TrimSuffix(name, ext)
	return base == "override" || strings.HasSuffix(base, "_override")
}

// DirFiles is a no-op: module files live only in the root directory.
func (m *moduleFiles) DirFiles(dir string, options *parserConfig, fileSet *ConfigFileSet) hcl.Diagnostics {
	return nil
}
// testFiles matches Terraform test files (.tftest.hcl and .tftest.json)
type testFiles struct{}

// Matches reports whether name carries one of the Terraform test extensions.
func (t *testFiles) Matches(name string) bool {
	return strings.HasSuffix(name, ".tftest.hcl") || strings.HasSuffix(name, ".tftest.json")
}

// DirFiles collects test files from the configured test subdirectory of dir
// (opts.testDirectory) and appends them to fileSet.Tests.
//
// A missing non-default test directory yields only a warning; any other read
// failure is reported as an error.
func (t *testFiles) DirFiles(dir string, opts *parserConfig, fileSet *ConfigFileSet) hcl.Diagnostics {
	var diags hcl.Diagnostics

	testPath := path.Join(dir, opts.testDirectory)
	testInfos, err := opts.fs.ReadDir(testPath)
	if err != nil {
		// Then we couldn't read from the testing directory for some reason.
		if os.IsNotExist(err) {
			// Then this means the testing directory did not exist.
			// We won't actually stop loading the rest of the configuration
			// for this, we will add a warning to explain to the user why
			// test files weren't processed but leave it at that.
			if opts.testDirectory != DefaultTestDirectory {
				// We'll only add the warning if a directory other than the
				// default has been requested. If the user is just loading
				// the default directory then we have no expectation that
				// it should actually exist.
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagWarning,
					Summary:  "Test directory does not exist",
					Detail:   fmt.Sprintf("Requested test directory %s does not exist.", testPath),
				})
			}
		} else {
			// Then there is some other reason we couldn't load. We will
			// treat this as a full error.
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Failed to read test directory",
				Detail:   fmt.Sprintf("Test directory %s could not be read: %v.", testPath, err),
			})
			// We'll also stop loading the rest of the config for this.
			return diags
		}
		return diags
	}

	// Process test files
	for _, info := range testInfos {
		if !t.Matches(info.Name()) {
			continue
		}
		name := info.Name()
		fileSet.Tests = append(fileSet.Tests, filepath.Join(testPath, name))
	}

	return diags
}
// queryFiles matches Terraform query files (.tfquery.hcl and .tfquery.json)
type queryFiles struct{}

// Matches reports whether name carries one of the query-file extensions.
func (q *queryFiles) Matches(name string) bool {
	for _, ext := range []string{".tfquery.hcl", ".tfquery.json"} {
		if strings.HasSuffix(name, ext) {
			return true
		}
	}
	return false
}

// DirFiles is a no-op: query files live only in the root module directory.
func (q *queryFiles) DirFiles(dir string, options *parserConfig, fileSet *ConfigFileSet) hcl.Diagnostics {
	return nil
}

@ -83,7 +83,7 @@ func testNestedModuleConfigFromDirWithTests(t *testing.T, path string) (*Config,
t.Helper()
parser := NewParser(nil)
mod, diags := parser.LoadConfigDirWithTests(path, "tests")
mod, diags := parser.LoadConfigDir(path, MatchTestFiles("tests"))
if mod == nil {
t.Fatal("got nil root module; want non-nil")
}

@ -0,0 +1,178 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package configs
import (
"fmt"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclsyntax"
"github.com/hashicorp/terraform/internal/addrs"
)
// QueryFile represents a single query file within a configuration directory.
//
// A query file is made up of a sequential list of "list" blocks, each
// defining a set of filters to apply when executing a List operation.
type QueryFile struct {
	// Providers defines a set of providers that are available to the list blocks
	// within this query file.
	Providers       map[string]*Provider
	// ProviderConfigs holds the provider blocks declared directly in this file.
	ProviderConfigs []*Provider

	// Locals and Variables declared in this query file.
	Locals    []*Local
	Variables []*Variable

	// ListResources is a slice of List blocks within the query file.
	ListResources []*Resource

	VariablesDeclRange hcl.Range
}
// loadQueryFile decodes the body of a .tfquery file into a QueryFile,
// accepting "list", "provider", "variable", and "locals" blocks and
// reporting any other block type as an error.
func loadQueryFile(body hcl.Body) (*QueryFile, hcl.Diagnostics) {
	var diags hcl.Diagnostics
	file := &QueryFile{
		Providers: make(map[string]*Provider),
	}

	content, contentDiags := body.Content(queryFileSchema)
	diags = append(diags, contentDiags...)

	// Tracks list block names already seen, for duplicate detection.
	listBlockNames := make(map[string]hcl.Range)

	for _, block := range content.Blocks {
		switch block.Type {
		case "list":
			list, listDiags := decodeQueryListBlock(block)
			diags = append(diags, listDiags...)

			// Check for a duplicate name BEFORE recording the block: the
			// original appended first and checked after, so a duplicate was
			// both reported and still returned in ListResources.
			if rng, exists := listBlockNames[list.Name]; exists {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Duplicate \"list\" block names",
					Detail:   fmt.Sprintf("This query file already has a list block named %s defined at %s.", list.Name, rng),
					Subject:  block.DefRange.Ptr(),
				})
				continue
			}
			listBlockNames[list.Name] = list.DeclRange

			if !listDiags.HasErrors() {
				file.ListResources = append(file.ListResources, list)
			}
		case "provider":
			cfg, cfgDiags := decodeProviderBlock(block, false)
			diags = append(diags, cfgDiags...)
			if cfg != nil {
				file.ProviderConfigs = append(file.ProviderConfigs, cfg)
			}
		case "variable":
			cfg, cfgDiags := decodeVariableBlock(block, false)
			diags = append(diags, cfgDiags...)
			if cfg != nil {
				file.Variables = append(file.Variables, cfg)
			}
		case "locals":
			defs, defsDiags := decodeLocalsBlock(block)
			diags = append(diags, defsDiags...)
			file.Locals = append(file.Locals, defs...)
		default:
			// We don't expect any other block types in a query file.
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Invalid block type",
				Detail:   fmt.Sprintf("This block type is not valid within a query file: %s", block.Type),
				Subject:  block.DefRange.Ptr(),
			})
		}
	}

	return file, diags
}
// decodeQueryListBlock decodes a single "list" block from a query file into
// a Resource with ListResourceMode, validating that a provider attribute is
// present, the block name is a valid identifier, and count/for_each are not
// combined.
//
// The block's two labels are its type and name; any attributes not covered
// by QueryListResourceBlockSchema remain in r.Config for later decoding.
func decodeQueryListBlock(block *hcl.Block) (*Resource, hcl.Diagnostics) {
	var diags hcl.Diagnostics
	content, remain, contentDiags := block.Body.PartialContent(QueryListResourceBlockSchema)
	diags = append(diags, contentDiags...)

	r := Resource{
		Mode:      addrs.ListResourceMode,
		Type:      block.Labels[0],
		TypeRange: block.LabelRanges[0],
		Name:      block.Labels[1],
		DeclRange: block.DefRange,
		Config:    remain,
	}

	if attr, exists := content.Attributes["provider"]; exists {
		var providerDiags hcl.Diagnostics
		r.ProviderConfigRef, providerDiags = decodeProviderConfigRef(attr.Expr, "provider")
		diags = append(diags, providerDiags...)
	} else {
		// Must have a provider attribute.
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Missing \"provider\" attribute",
			Detail:   "You must specify a provider attribute when defining a list block.",
			Subject:  r.DeclRange.Ptr(),
		})
	}

	if !hclsyntax.ValidIdentifier(r.Name) {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Invalid list block name",
			Detail:   badIdentifierDetail,
			Subject:  r.DeclRange.Ptr(),
		})
	}

	if attr, exists := content.Attributes["count"]; exists {
		r.Count = attr.Expr
	}
	if attr, exists := content.Attributes["for_each"]; exists {
		r.ForEach = attr.Expr
		// Cannot have count and for_each on the same resource block
		if r.Count != nil {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  `Invalid combination of "count" and "for_each"`,
				Detail:   `The "count" and "for_each" meta-arguments are mutually-exclusive.`,
				Subject:  &attr.NameRange,
			})
		}
	}

	return &r, diags
}
// QueryListResourceBlockSchema is the schema for a list resource type within
// a terraform query file. It shares the common resource meta-arguments
// (commonResourceAttributes), leaving everything else in the remaining body.
var QueryListResourceBlockSchema = &hcl.BodySchema{
	Attributes: commonResourceAttributes,
}

// queryFileSchema is the schema for a terraform query file. It defines the
// expected structure of the file, including the types of supported blocks and their
// attributes.
var queryFileSchema = &hcl.BodySchema{
	Blocks: []hcl.BlockHeaderSchema{
		{
			// A list block addresses a resource type and a name, mirroring
			// the two-label form of resource/data blocks.
			Type:       "list",
			LabelNames: []string{"type", "name"},
		},
		{
			Type:       "provider",
			LabelNames: []string{"name"},
		},
		{
			Type: "locals",
		},
		{
			Type:       "variable",
			LabelNames: []string{"name"},
		},
	},
}

@ -0,0 +1,6 @@
list "aws_instance" "test" {
count = 1
tags = {
Name = "test"
}
}

@ -0,0 +1,8 @@
resource "aws_instance" "test" {
provider = aws
count = 1
tags = {
Name = "test"
}
}

@ -0,0 +1,14 @@
list "aws_instance" "test" {
provider = aws
count = 1
tags = {
Name = "test"
}
}
list "aws_instance" "test2" {
provider = aws
count = 1
tags = {
Name = join("-", ["test2", list.aws_instance.test.data[0]])
}
}

@ -0,0 +1,14 @@
list "aws_instance" "test" {
provider = aws
count = 1
tags = {
Name = "test"
}
}
list "aws_instance" "test2" {
provider = aws
count = 1
tags = {
Name = join("-", ["test2", list.aws_instance.test.data[0]])
}
}

@ -53,7 +53,7 @@ func LoadConfigForTests(t *testing.T, rootDir string, testsDir string) (*configs
t.Fatalf("failed to refresh modules after installation: %s", err)
}
config, hclDiags := loader.LoadConfig(rootDir)
config, hclDiags := loader.LoadConfig(rootDir, configs.MatchTestFiles(testsDir))
diags = diags.Append(hclDiags)
return config, loader, cleanup, diags
}

@ -139,7 +139,7 @@ func testModuleInline(t testing.TB, sources map[string]string) *configs.Config {
t.Fatalf("failed to refresh modules after installation: %s", err)
}
config, diags := loader.LoadConfigWithTests(cfgPath, "tests")
config, diags := loader.LoadConfig(cfgPath, configs.MatchTestFiles("tests"))
if diags.HasErrors() {
t.Fatal(diags.Error())
}

Loading…
Cancel
Save