Create Alias Domain Object and Repo (#4275)

* Create alias domain object resource

* Add paginated listing

* Fix rate-limit related panics

* Address reviewer comments

* remove NewAlias value checks
pull/4470/head
Todd 2 years ago
parent 7e2038bfcc
commit e848e76b48

@ -198,6 +198,7 @@ protobuild:
@protoc-go-inject-tag -input=./internal/storage/plugin/store/storage.pb.go
@protoc-go-inject-tag -input=./internal/policy/storage/store/policy.pb.go
@protoc-go-inject-tag -input=./internal/policy/store/policy.pb.go
@protoc-go-inject-tag -input=./internal/alias/target/store/alias.pb.go
# inject classification tags (see: https://github.com/hashicorp/go-eventlogger/tree/main/filters/encrypt)
@protoc-go-inject-tag -input=./internal/gen/controller/api/services/auth_method_service.pb.go

@ -127,6 +127,9 @@ const (
// StoragePolicyPrefix for storage policies.
StoragePolicyPrefix = "pst"
// TargetAliasPrefix is the prefix for target aliases
TargetAliasPrefix = "alt"
)
type ResourceInfo struct {
@ -215,6 +218,11 @@ var prefixToResourceType = map[string]ResourceInfo{
Subtype: UnknownSubtype,
},
TargetAliasPrefix: {
Type: resource.Alias,
Subtype: UnknownSubtype,
},
VaultCredentialStorePrefix: {
Type: resource.CredentialStore,
Subtype: UnknownSubtype,

@ -0,0 +1,8 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package alias
const (
	// Domain is the domain name shared by all alias resources; it is used
	// when registering alias subtypes/prefixes with the globals package.
	Domain = "alias"
)

@ -0,0 +1,92 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target
import (
"context"
"github.com/hashicorp/boundary/internal/alias/target/store"
"github.com/hashicorp/boundary/internal/db/timestamp"
"github.com/hashicorp/boundary/internal/errors"
"github.com/hashicorp/boundary/internal/oplog"
"github.com/hashicorp/boundary/internal/types/resource"
"google.golang.org/protobuf/proto"
)
// An Alias contains a storage alias. It is owned by a scope.
type Alias struct {
	*store.Alias
	// tableName allows overriding the default gorm table name; the "-"
	// tag excludes it from gorm's column mapping.
	tableName string `gorm:"-"`
}
// clone returns a deep copy of the alias, backed by a proto.Clone of the
// embedded store message.
func (al *Alias) clone() *Alias {
	copied := proto.Clone(al.Alias).(*store.Alias)
	return &Alias{Alias: copied}
}
// allocAlias returns a new, empty Alias. Centralizing allocation here is
// just easier/better than leaking the underlying type bits to the repo,
// since the repo needs to alloc this type quite often.
func allocAlias() *Alias {
	return &Alias{Alias: &store.Alias{}}
}
// NewAlias generates a new in-memory alias. No validation of scopeId or
// value is performed here; constraints (non-empty value, value shape,
// global scope) are enforced by the database at write time.
// Supported options: WithName, WithDescription, WithDestinationId,
// WithHostId.
func NewAlias(ctx context.Context, scopeId, value string, opt ...Option) (*Alias, error) {
	const op = "target.NewAlias"
	opts, err := getOpts(opt...)
	if err != nil {
		return nil, errors.Wrap(ctx, err, op)
	}
	return &Alias{
		Alias: &store.Alias{
			Name:          opts.withName,
			Description:   opts.withDescription,
			ScopeId:       scopeId,
			Value:         value,
			DestinationId: opts.withDestinationId,
			HostId:        opts.withHostId,
		},
	}, nil
}
// GetResourceType returns the resource type of the Alias, which is
// always resource.Alias.
func (al Alias) GetResourceType() resource.Type {
	return resource.Alias
}
// TableName returns the gorm table name, honoring any override set via
// SetTableName and defaulting to "alias_target".
func (al *Alias) TableName() string {
	if al.tableName == "" {
		return "alias_target"
	}
	return al.tableName
}
// SetTableName overrides the default gorm table name used for this
// alias instance.
func (al *Alias) SetTableName(tableName string) {
	al.tableName = tableName
}
// deletedAlias is used to read the ids and delete times of aliases that
// have been deleted, in support of refresh-style list pagination.
type deletedAlias struct {
	// PublicId is the id of the deleted alias.
	PublicId string `gorm:"primary_key"`
	// DeleteTime records when the alias was deleted.
	DeleteTime *timestamp.Timestamp
}

// TableName returns the tablename to override the default gorm table name
func (al *deletedAlias) TableName() string {
	return "alias_target_deleted"
}
// newAliasMetadata builds the oplog metadata recorded for an operation
// (create/update/delete) on the provided alias.
func newAliasMetadata(a *Alias, op oplog.OpType) oplog.Metadata {
	metadata := oplog.Metadata{
		"resource-public-id": []string{a.GetPublicId()},
		"resource-type":      []string{"target alias"},
		"op-type":            []string{op.String()},
		// hyphenated key for consistency with the other metadata keys and
		// with oplog metadata produced elsewhere in the codebase
		"scope-id": []string{a.ScopeId},
	}
	return metadata
}

@ -0,0 +1,472 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target
import (
"context"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/hashicorp/boundary/internal/alias/target/store"
"github.com/hashicorp/boundary/internal/db"
"github.com/hashicorp/boundary/internal/iam"
"github.com/hashicorp/boundary/internal/target/tcp"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"google.golang.org/protobuf/testing/protocmp"
)
// TestNewAlias verifies the in-memory constructor populates scope, value
// and options without touching the database.
func TestNewAlias(t *testing.T) {
	t.Run("valid", func(t *testing.T) {
		got, err := NewAlias(context.Background(), "global", "valid.alias")
		require.NoError(t, err)
		assert.NotNil(t, got)
		assert.Equal(t, got.ScopeId, "global")
		assert.Equal(t, got.Value, "valid.alias")
	})
	t.Run("with destination", func(t *testing.T) {
		got, err := NewAlias(context.Background(), "global", "with.destination", WithDestinationId("ttcp_1234567890"))
		require.NoError(t, err)
		assert.NotNil(t, got)
		assert.Equal(t, got.ScopeId, "global")
		assert.Equal(t, got.Value, "with.destination")
		assert.Equal(t, got.DestinationId, "ttcp_1234567890")
	})
}
// TestCreate exercises inserting aliases directly via db.Writer, covering
// successful creates as well as the database constraints that reject
// invalid rows (scope, value shape, host-without-destination, unknown
// destination, duplicate value). Requires a postgres test database.
func TestCreate(t *testing.T) {
	conn, _ := db.TestSetup(t, "postgres")
	rw := db.New(conn)
	ctx := context.Background()
	wrapper := db.TestWrapper(t)
	iamRepo := iam.TestRepo(t, conn, wrapper)
	_, proj := iam.TestScopes(t, iamRepo)
	tar := tcp.TestTarget(ctx, t, conn, proj.GetPublicId(), "test")
	cases := []struct {
		name        string
		scope       string
		value       string
		opts        []Option
		// validate runs extra assertions on the created alias; only set
		// for success cases.
		validate    func(*testing.T, *Alias)
		// errContains, when non-empty, marks the case as expecting a
		// create error containing this substring.
		errContains string
	}{
		{
			name:  "valid",
			scope: "global",
			value: "valid.alias",
			opts:  []Option{WithDestinationId(tar.GetPublicId())},
			validate: func(t *testing.T, a *Alias) {
				t.Helper()
				assert.Equal(t, a.DestinationId, tar.GetPublicId())
			},
		},
		{
			name:  "valid with host",
			scope: "global",
			value: "host.valid.alias",
			opts:  []Option{WithDestinationId(tar.GetPublicId()), WithHostId("hst_1234567890")},
			validate: func(t *testing.T, a *Alias) {
				t.Helper()
				assert.Equal(t, a.DestinationId, tar.GetPublicId())
				assert.Equal(t, a.HostId, "hst_1234567890")
			},
		},
		{
			name:  "valid no destination",
			scope: "global",
			value: "nodestination.alias",
			validate: func(t *testing.T, a *Alias) {
				t.Helper()
				assert.Empty(t, a.DestinationId)
			},
		},
		{
			name:  "valid with name",
			scope: "global",
			value: "valid-with-name.alias",
			opts:  []Option{WithName("valid-with-name")},
			validate: func(t *testing.T, a *Alias) {
				t.Helper()
				assert.Equal(t, "valid-with-name", a.Name)
			},
		},
		{
			name:  "valid with description",
			scope: "global",
			value: "valid-with-description.alias",
			opts:  []Option{WithName("valid-with-description"), WithDescription("a description")},
			validate: func(t *testing.T, a *Alias) {
				t.Helper()
				assert.Equal(t, "valid-with-description", a.Name)
				assert.Equal(t, "a description", a.Description)
			},
		},
		{
			name:        "host with no destination",
			scope:       "global",
			value:       "host.with.no.destination",
			opts:        []Option{WithHostId("hst_1234567890")},
			errContains: `destination_id_set_when_host_id_is_set constraint failed`,
		},
		{
			name:        "unsupported project scope",
			scope:       proj.GetPublicId(),
			value:       "unsupported.project.scope",
			errContains: `alias_must_be_in_global_scope constraint failed`,
		},
		{
			name:        "unsupported org scope",
			scope:       proj.GetParentId(),
			value:       "unsupported.org.scope",
			errContains: `alias_must_be_in_global_scope constraint failed`,
		},
		{
			name:        "invalid scope",
			scope:       "invalid",
			value:       "invalid.scope",
			errContains: `wt_scope_id_check constraint failed`,
		},
		{
			name:        "invalid dest",
			scope:       "global",
			value:       "invalid.dest",
			opts:        []Option{WithDestinationId("ttcp_unknown")},
			errContains: `foreign key constraint "target_fkey"`,
		},
		{
			name:        "invalid alias",
			scope:       "global",
			value:       "-not-valid-dns-name-",
			errContains: "wt_target_alias_value_shape constraint failed",
		},
	}
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			a, err := NewAlias(ctx, c.scope, c.value, c.opts...)
			require.NoError(t, err)
			assert.NotNil(t, a)
			a.PublicId, err = newAliasId(ctx)
			require.NoError(t, err)
			start := time.Now().UTC()
			err = rw.Create(ctx, a)
			if c.errContains != "" {
				assert.Error(t, err)
				assert.Contains(t, err.Error(), c.errContains)
			} else {
				require.NoError(t, err)
				assert.NotNil(t, a)
				assert.Equal(t, a.Version, uint32(1))
				assert.Equal(t, a.ScopeId, c.scope)
				assert.Equal(t, a.Value, c.value)
				// create/update times are assigned by the database and
				// should be no earlier than the pre-create timestamp
				assert.GreaterOrEqual(t, a.CreateTime.AsTime(), start)
				assert.GreaterOrEqual(t, a.UpdateTime.AsTime(), start)
				if c.validate != nil {
					c.validate(t, a)
				}
			}
		})
	}
	t.Run("case insensitive duplicate alias", func(t *testing.T) {
		a := TestAlias(t, rw, "duplicate.alias")
		t.Cleanup(func() {
			_, err := rw.Delete(ctx, a)
			require.NoError(t, err)
		})
		var err error
		a.PublicId, err = newAliasId(ctx)
		require.NoError(t, err)
		// only the case differs from the existing row; uniqueness on the
		// value must be case-insensitive
		a.Value = "DUPLICATE.ALIAS"
		err = rw.Create(ctx, a)
		assert.Error(t, err)
		assert.Contains(t, err.Error(), `duplicate key value violates unique constraint "alias_value_uq"`)
	})
}
// TestUpdate exercises field-mask and null-mask updates of aliases via
// db.Writer, covering value/destination/host/name/description updates and
// removals. Each case creates a fresh alias named "test.alias" with
// startingOptions applied, then updates it with the masks from the case.
// Requires a postgres test database.
func TestUpdate(t *testing.T) {
	conn, _ := db.TestSetup(t, "postgres")
	rw := db.New(conn)
	ctx := context.Background()
	wrapper := db.TestWrapper(t)
	iamRepo := iam.TestRepo(t, conn, wrapper)
	_, proj := iam.TestScopes(t, iamRepo)
	tar1 := tcp.TestTarget(ctx, t, conn, proj.GetPublicId(), "test")
	tar2 := tcp.TestTarget(ctx, t, conn, proj.GetPublicId(), "test2")
	cases := []struct {
		name string
		// startingOptions are applied when creating the alias that the
		// case then updates.
		startingOptions []Option
		// in carries the new field values; only fields named in
		// fieldMask/nullMask are applied.
		in        *Alias
		fieldMask []string
		nullMask  []string
		// want is the expected post-update row (Version/PublicId are
		// filled in by the loop body).
		want        *Alias
		errContains string
	}{
		{
			name: "update alias value",
			in: &Alias{
				Alias: &store.Alias{Value: "updated.alias"},
			},
			fieldMask: []string{"Value"},
			want: &Alias{
				Alias: &store.Alias{
					ScopeId: "global",
					Value:   "updated.alias",
				},
			},
		},
		{
			name: "remove alias value",
			in: &Alias{
				Alias: &store.Alias{},
			},
			fieldMask:   []string{"Value"},
			errContains: `wt_alias_too_short constraint failed:`,
		},
		{
			name:            "update destination id",
			startingOptions: []Option{WithDestinationId(tar1.GetPublicId())},
			in: &Alias{
				Alias: &store.Alias{DestinationId: tar2.GetPublicId()},
			},
			fieldMask: []string{"DestinationId"},
			want: &Alias{
				Alias: &store.Alias{
					ScopeId:       "global",
					Value:         "test.alias",
					DestinationId: tar2.GetPublicId(),
				},
			},
		},
		{
			name: "update destination id with host id",
			startingOptions: []Option{
				WithDestinationId(tar1.GetPublicId()),
				WithHostId("hst_1234567890"),
			},
			in: &Alias{
				Alias: &store.Alias{
					DestinationId: tar2.GetPublicId(),
				},
			},
			fieldMask: []string{"DestinationId"},
			want: &Alias{
				Alias: &store.Alias{
					ScopeId:       "global",
					Value:         "test.alias",
					DestinationId: tar2.GetPublicId(),
					HostId:        "hst_1234567890",
				},
			},
		},
		{
			name:            "remove destination id",
			startingOptions: []Option{WithDestinationId(tar1.GetPublicId())},
			in: &Alias{
				Alias: &store.Alias{},
			},
			nullMask: []string{"DestinationId"},
			want: &Alias{
				Alias: &store.Alias{
					ScopeId: "global",
					Value:   "test.alias",
				},
			},
		},
		{
			// clearing the destination is expected to also clear the host
			name: "remove destination id with host id",
			startingOptions: []Option{
				WithDestinationId(tar1.GetPublicId()),
				WithHostId("hst_1234567890"),
			},
			in: &Alias{
				Alias: &store.Alias{},
			},
			nullMask: []string{"DestinationId"},
			want: &Alias{
				Alias: &store.Alias{
					ScopeId: "global",
					Value:   "test.alias",
				},
			},
		},
		{
			name: "update host id",
			startingOptions: []Option{
				WithDestinationId(tar1.GetPublicId()),
				WithHostId("hst_1234567890"),
			},
			in: &Alias{
				Alias: &store.Alias{
					HostId: "hst_0987654321",
				},
			},
			fieldMask: []string{"HostId"},
			want: &Alias{
				Alias: &store.Alias{
					ScopeId:       "global",
					Value:         "test.alias",
					DestinationId: tar1.GetPublicId(),
					HostId:        "hst_0987654321",
				},
			},
		},
		{
			name: "remove host id",
			startingOptions: []Option{
				WithDestinationId(tar1.GetPublicId()),
				WithHostId("hst_1234567890"),
			},
			in: &Alias{
				Alias: &store.Alias{},
			},
			nullMask: []string{"HostId"},
			want: &Alias{
				Alias: &store.Alias{
					ScopeId:       "global",
					Value:         "test.alias",
					DestinationId: tar1.GetPublicId(),
				},
			},
		},
		{
			name: "update name",
			startingOptions: []Option{
				WithName("updateName"),
			},
			in: &Alias{
				Alias: &store.Alias{
					Name: "updateName-updated",
				},
			},
			fieldMask: []string{"Name"},
			want: &Alias{
				Alias: &store.Alias{
					ScopeId: "global",
					Name:    "updateName-updated",
					Value:   "test.alias",
				},
			},
		},
		{
			name: "remove name",
			startingOptions: []Option{
				WithName("updateName"),
			},
			in: &Alias{
				Alias: &store.Alias{},
			},
			nullMask: []string{"Name"},
			want: &Alias{
				Alias: &store.Alias{
					ScopeId: "global",
					Value:   "test.alias",
				},
			},
		},
		{
			name: "update description",
			startingOptions: []Option{
				WithDescription("description"),
			},
			in: &Alias{
				Alias: &store.Alias{
					Description: "description-updated",
				},
			},
			fieldMask: []string{"Description"},
			want: &Alias{
				Alias: &store.Alias{
					ScopeId:     "global",
					Description: "description-updated",
					Value:       "test.alias",
				},
			},
		},
		{
			name: "remove description",
			startingOptions: []Option{
				WithDescription("description"),
			},
			in: &Alias{
				Alias: &store.Alias{},
			},
			nullMask: []string{"Description"},
			want: &Alias{
				Alias: &store.Alias{
					ScopeId: "global",
					Value:   "test.alias",
				},
			},
		},
	}
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			a := TestAlias(t, rw, "test.alias", c.startingOptions...)
			t.Cleanup(func() {
				_, err := rw.Delete(ctx, a)
				require.NoError(t, err)
			})
			in := c.in.clone()
			in.PublicId = a.PublicId
			in.Version = a.Version
			_, err := rw.Update(ctx, in, c.fieldMask, c.nullMask)
			if c.errContains != "" {
				assert.Error(t, err)
				assert.Contains(t, err.Error(), c.errContains)
			} else {
				require.NoError(t, err)
				assert.Greater(t, in.UpdateTime.AsTime(), in.CreateTime.AsTime())
				// a successful update bumps the version from 1 to 2
				c.want.Version = 2
				c.want.PublicId = a.PublicId
				// timestamps are db-assigned and excluded from the diff
				in.UpdateTime = nil
				in.CreateTime = nil
				assert.Empty(t, cmp.Diff(c.want, in, protocmp.Transform()))
			}
		})
	}
}
// TestDelete verifies deleting aliases via db.Writer: existing rows (with
// and without a destination) report one row deleted, and a non-existent
// id reports zero rows without an error.
func TestDelete(t *testing.T) {
	conn, _ := db.TestSetup(t, "postgres")
	rw := db.New(conn)
	ctx := context.Background()

	t.Run("delete existing", func(t *testing.T) {
		al := TestAlias(t, rw, "alias.to.delete")
		rowsDeleted, err := rw.Delete(ctx, al)
		assert.NoError(t, err)
		assert.Equal(t, 1, rowsDeleted)
	})
	t.Run("delete existing with destination", func(t *testing.T) {
		_, p := iam.TestScopes(t, iam.TestRepo(t, conn, db.TestWrapper(t)))
		tar := tcp.TestTarget(ctx, t, conn, p.GetPublicId(), "test")
		al := TestAlias(t, rw, "alias.with.destination", WithDestinationId(tar.GetPublicId()))
		rowsDeleted, err := rw.Delete(ctx, al)
		assert.NoError(t, err)
		assert.Equal(t, 1, rowsDeleted)
	})
	t.Run("delete non-existent", func(t *testing.T) {
		al := allocAlias()
		al.PublicId = "alias_does_not_exist"
		rowsDeleted, err := rw.Delete(ctx, al)
		assert.NoError(t, err)
		assert.Equal(t, 0, rowsDeleted)
	})
}

@ -0,0 +1,13 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target
// These constants are the field names accepted in the target alias
// field masks (see UpdateAlias).
const (
	nameField          = "Name"
	descriptionField   = "Description"
	valueField         = "Value"
	destinationIdField = "DestinationId"
	hostIdField        = "HostId"
)

@ -0,0 +1,92 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target
import (
"errors"
"github.com/hashicorp/boundary/internal/pagination"
)
// getOpts applies each inbound Option to a default options struct and
// returns the result, stopping at the first Option that errors.
func getOpts(opt ...Option) (options, error) {
	opts := getDefaultOptions()
	for _, apply := range opt {
		if err := apply(&opts); err != nil {
			return opts, err
		}
	}
	return opts, nil
}
// Option - how Options are passed as arguments.
type Option func(*options) error

// options = how options are represented
type options struct {
	// withName is an optional name for the alias.
	withName string
	// withDescription is an optional description for the alias.
	withDescription string
	// withDestinationId optionally points the alias at a target.
	withDestinationId string
	// withHostId optionally pins the alias to a specific host.
	withHostId string
	// withLimit caps list results; see WithLimit for semantics.
	withLimit int
	// withStartPageAfterItem marks where the next list page starts.
	withStartPageAfterItem pagination.Item
}

// getDefaultOptions returns the zero-valued options struct.
func getDefaultOptions() options {
	return options{}
}
// WithName provides an option to provide a name for the alias.
func WithName(name string) Option {
	return func(o *options) error {
		o.withName = name
		return nil
	}
}
// WithDescription provides an option to provide a description for the
// alias.
func WithDescription(desc string) Option {
	return func(o *options) error {
		o.withDescription = desc
		return nil
	}
}
// WithDestinationId provides an option to provide a destination id,
// i.e. the target the alias resolves to.
func WithDestinationId(id string) Option {
	return func(o *options) error {
		o.withDestinationId = id
		return nil
	}
}
// WithHostId provides an option to provide a host id for the alias.
func WithHostId(id string) Option {
	return func(o *options) error {
		o.withHostId = id
		return nil
	}
}
// WithLimit provides an option to provide a limit. Intentionally allowing
// negative integers. If WithLimit < 0, then unlimited results are
// returned. If WithLimit == 0, then default limits are used for results.
func WithLimit(l int) Option {
	return func(o *options) error {
		o.withLimit = l
		return nil
	}
}
// WithStartPageAfterItem is used to paginate over the results.
// The next page will start after the provided item, which must be
// non-nil.
func WithStartPageAfterItem(item pagination.Item) Option {
	return func(o *options) error {
		if item != nil {
			o.withStartPageAfterItem = item
			return nil
		}
		return errors.New("item cannot be nil")
	}
}

@ -0,0 +1,73 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target
import (
"testing"
"time"
"github.com/hashicorp/boundary/internal/db/timestamp"
"github.com/hashicorp/boundary/internal/pagination"
"github.com/stretchr/testify/assert"
)
// fakeItem is a minimal pagination.Item stand-in for option tests; it
// embeds the interface so only the methods the tests exercise need
// concrete implementations.
type fakeItem struct {
	pagination.Item
	publicId   string
	updateTime time.Time
}

// GetPublicId returns the fake item's public id.
func (p *fakeItem) GetPublicId() string {
	return p.publicId
}

// GetUpdateTime returns the fake item's update time as a timestamp.
func (p *fakeItem) GetUpdateTime() *timestamp.Timestamp {
	return timestamp.New(p.updateTime)
}
// Test_GetOpts verifies each Option constructor sets exactly its field
// on the options struct.
func Test_GetOpts(t *testing.T) {
	t.Parallel()
	t.Run("WithName", func(t *testing.T) {
		opts, err := getOpts(WithName("test"))
		assert.NoError(t, err)
		testOpts := getDefaultOptions()
		testOpts.withName = "test"
		assert.Equal(t, opts, testOpts)
	})
	t.Run("WithDescription", func(t *testing.T) {
		opts, err := getOpts(WithDescription("test desc"))
		assert.NoError(t, err)
		testOpts := getDefaultOptions()
		testOpts.withDescription = "test desc"
		assert.Equal(t, opts, testOpts)
	})
	t.Run("WithDestinationId", func(t *testing.T) {
		opts, err := getOpts(WithDestinationId("test"))
		assert.NoError(t, err)
		testOpts := getDefaultOptions()
		testOpts.withDestinationId = "test"
		assert.Equal(t, opts, testOpts)
	})
	t.Run("WithHostId", func(t *testing.T) {
		opts, err := getOpts(WithHostId("test"))
		assert.NoError(t, err)
		testOpts := getDefaultOptions()
		testOpts.withHostId = "test"
		assert.Equal(t, opts, testOpts)
	})
	t.Run("WithLimit", func(t *testing.T) {
		opts, err := getOpts(WithLimit(5))
		assert.NoError(t, err)
		testOpts := getDefaultOptions()
		testOpts.withLimit = 5
		assert.Equal(t, opts, testOpts)
	})
	t.Run("WithStartPageAfterItem", func(t *testing.T) {
		updateTime := time.Now()
		opts, err := getOpts(WithStartPageAfterItem(&fakeItem{nil, "s_1", updateTime}))
		assert.NoError(t, err)
		// compared via getters since the stored value is an interface
		assert.Equal(t, opts.withStartPageAfterItem.GetPublicId(), "s_1")
		assert.Equal(t, opts.withStartPageAfterItem.GetUpdateTime(), timestamp.New(updateTime))
	})
}

@ -0,0 +1,33 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target
import (
"context"
"github.com/hashicorp/boundary/globals"
"github.com/hashicorp/boundary/internal/alias"
"github.com/hashicorp/boundary/internal/db"
"github.com/hashicorp/boundary/internal/errors"
"github.com/hashicorp/boundary/internal/types/resource"
)
// init registers the target alias public-id prefix with the globals
// package so ids with this prefix resolve to the alias resource type and
// this subtype.
func init() {
	globals.RegisterPrefixToResourceInfo(globals.TargetAliasPrefix, resource.Alias, alias.Domain, Subtype)
}
// Subtype identifies the target alias subtype within the alias domain.
const (
	Subtype = globals.Subtype("target")
)
// newAliasId creates a new public id for a target alias using the
// registered target-alias prefix.
func newAliasId(ctx context.Context) (string, error) {
	const op = "target.newAliasId"
	publicId, err := db.NewPublicId(ctx, globals.TargetAliasPrefix)
	if err != nil {
		return "", errors.Wrap(ctx, err, op)
	}
	return publicId, nil
}

@ -0,0 +1,10 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target
const (
	// estimateCount returns the planner's row-count estimate from
	// pg_class, which is much cheaper than count(*).
	// NOTE(review): this queries the 'alias' relation, while this type's
	// concrete table is "alias_target" — confirm which relation the
	// estimate is meant to cover.
	estimateCount = `
select sum(reltuples::bigint) as estimate from pg_class where oid in ('alias'::regclass)
`
)

@ -0,0 +1,55 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target
import (
"context"
"github.com/hashicorp/boundary/internal/db"
"github.com/hashicorp/boundary/internal/errors"
"github.com/hashicorp/boundary/internal/kms"
)
// A Repository stores and retrieves the persistent alias types in this
// package. It is not safe to use a repository concurrently.
type Repository struct {
	reader db.Reader
	writer db.Writer
	kms    *kms.Kms

	// defaultLimit provides a default for limiting the number of results
	// returned from the repo
	defaultLimit int
}
// NewRepository creates a new Repository. The returned repository should
// only be used for one transaction and it is not safe for concurrent go
// routines to access it. WithLimit option is used as a repo wide default
// limit applied to all ListX methods.
func NewRepository(ctx context.Context, r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*Repository, error) {
	// was "static.NewRepository" — a copy/paste from the static package;
	// corrected so wrapped errors report this package.
	const op = "target.NewRepository"
	switch {
	case r == nil:
		return nil, errors.New(ctx, errors.InvalidParameter, op, "db.Reader")
	case w == nil:
		return nil, errors.New(ctx, errors.InvalidParameter, op, "db.Writer")
	case kms == nil:
		return nil, errors.New(ctx, errors.InvalidParameter, op, "kms")
	}
	opts, err := getOpts(opt...)
	if err != nil {
		return nil, errors.Wrap(ctx, err, op)
	}
	if opts.withLimit == 0 {
		// zero signals the boundary defaults should be used.
		opts.withLimit = db.DefaultLimit
	}
	return &Repository{
		reader:       r,
		writer:       w,
		kms:          kms,
		defaultLimit: opts.withLimit,
	}, nil
}

@ -0,0 +1,265 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target
import (
"context"
"fmt"
"slices"
"strings"
"github.com/hashicorp/boundary/internal/db"
"github.com/hashicorp/boundary/internal/errors"
"github.com/hashicorp/boundary/internal/kms"
"github.com/hashicorp/boundary/internal/oplog"
"github.com/hashicorp/go-dbw"
)
// CreateAlias inserts Alias a into the repository and returns a new
// Alias containing the alias's PublicId. a is not changed. a must
// contain a valid ScopeId. a must not contain a PublicId. The PublicId is
// generated and assigned by this method. opt is ignored.
//
// Description, DestinationId, and HostId are optional.
//
// Value must be globally unique.
func (r *Repository) CreateAlias(ctx context.Context, a *Alias, opt ...Option) (*Alias, error) {
	const op = "target.(Repository).CreateAlias"
	switch {
	case a == nil:
		return nil, errors.New(ctx, errors.InvalidParameter, op, "nil Alias")
	case a.Alias == nil:
		return nil, errors.New(ctx, errors.InvalidParameter, op, "nil embedded Alias")
	case a.Value == "":
		return nil, errors.New(ctx, errors.InvalidParameter, op, "no value")
	case a.ScopeId == "":
		return nil, errors.New(ctx, errors.InvalidParameter, op, "no scope id")
	case a.PublicId != "":
		return nil, errors.New(ctx, errors.InvalidParameter, op, "public id not empty")
	}
	// clone so the caller's value is never mutated
	a = a.clone()
	id, err := newAliasId(ctx)
	if err != nil {
		return nil, errors.Wrap(ctx, err, op)
	}
	a.PublicId = id
	oplogWrapper, err := r.kms.GetWrapper(ctx, a.ScopeId, kms.KeyPurposeOplog)
	if err != nil {
		return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to get oplog wrapper"))
	}
	metadata := newAliasMetadata(a, oplog.OpType_OP_TYPE_CREATE)
	var newAlias *Alias
	_, err = r.writer.DoTx(
		ctx,
		db.StdRetryCnt,
		db.ExpBackoff{},
		func(_ db.Reader, w db.Writer) error {
			// clone again inside the tx since the db layer mutates the
			// value it is given (timestamps, version)
			newAlias = a.clone()
			err := w.Create(
				ctx,
				newAlias,
				db.WithOplog(oplogWrapper, metadata),
			)
			if err != nil {
				return errors.Wrap(ctx, err, op)
			}
			return nil
		},
	)
	if err != nil {
		// translate well-known constraint violations into friendlier errors
		if errors.IsUniqueError(err) {
			switch {
			case strings.Contains(err.Error(), `"alias_value_uq"`):
				return nil, errors.Wrap(ctx, err, op, errors.WithMsg(fmt.Sprintf("alias value %q is already in use", a.Value)))
			case strings.Contains(err.Error(), `"alias_target_scope_id_name_uq"`):
				return nil, errors.Wrap(ctx, err, op, errors.WithMsg(fmt.Sprintf("in scope %q, the name %q is already in use", a.ScopeId, a.Name)))
			}
		}
		if strings.Contains(err.Error(), `violates foreign key constraint "target_fkey"`) {
			return nil, errors.Wrap(ctx, err, op, errors.WithCode(errors.NotFound), errors.WithMsg("target with specified destination id %q was not found", a.GetDestinationId()))
		}
		return nil, errors.Wrap(ctx, err, op)
	}
	return newAlias, nil
}
// UpdateAlias updates the repository entry for a.PublicId with the
// values in a for the fields listed in fieldMask. It returns a new
// Alias containing the updated values and a count of the number of
// records updated. a is not changed. version must match the stored
// row's version for the update to succeed (optimistic locking).
func (r *Repository) UpdateAlias(ctx context.Context, a *Alias, version uint32, fieldMask []string, opt ...Option) (*Alias, int, error) {
	const op = "target.(Repository).UpdateAlias"
	switch {
	case a == nil:
		return nil, db.NoRowsAffected, errors.New(ctx, errors.InvalidParameter, op, "nil Alias")
	case a.Alias == nil:
		return nil, db.NoRowsAffected, errors.New(ctx, errors.InvalidParameter, op, "nil embedded Alias")
	case a.PublicId == "":
		return nil, db.NoRowsAffected, errors.New(ctx, errors.InvalidParameter, op, "no public id")
	case len(fieldMask) == 0:
		return nil, db.NoRowsAffected, errors.New(ctx, errors.EmptyFieldMask, op, "empty field mask")
	case version == 0:
		return nil, db.NoRowsAffected, errors.New(ctx, errors.InvalidParameter, op, "no version")
	}
	// only the known alias fields may appear in the mask
	for _, f := range fieldMask {
		switch {
		case strings.EqualFold(valueField, f):
		case strings.EqualFold(nameField, f):
		case strings.EqualFold(descriptionField, f):
		case strings.EqualFold(destinationIdField, f):
		case strings.EqualFold(hostIdField, f):
		default:
			return nil, db.NoRowsAffected, errors.New(ctx, errors.InvalidFieldMask, op, fmt.Sprintf("invalid field mask: %s", f))
		}
	}
	// split the mask into fields being set vs. fields being nulled out
	dbMask, nullFields := dbw.BuildUpdatePaths(
		map[string]any{
			nameField:          a.Name,
			descriptionField:   a.Description,
			valueField:         a.Value,
			destinationIdField: a.DestinationId,
			hostIdField:        a.HostId,
		},
		fieldMask,
		nil,
	)
	if slices.Contains(nullFields, valueField) {
		return nil, db.NoRowsAffected, errors.New(ctx, errors.InvalidParameter, op, "value cannot be empty")
	}
	oplogWrapper, err := r.kms.GetWrapper(ctx, a.ScopeId, kms.KeyPurposeOplog)
	if err != nil {
		return nil, db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg("unable to get oplog wrapper"))
	}
	// clone so the caller's value is never mutated
	a = a.clone()
	metadata := newAliasMetadata(a, oplog.OpType_OP_TYPE_UPDATE)
	var rowsUpdated int
	var returnedAlias *Alias
	_, err = r.writer.DoTx(
		ctx,
		db.StdRetryCnt,
		db.ExpBackoff{},
		func(_ db.Reader, w db.Writer) error {
			returnedAlias = a.clone()
			var err error
			rowsUpdated, err = w.Update(
				ctx,
				returnedAlias,
				dbMask,
				nullFields,
				db.WithOplog(oplogWrapper, metadata),
				db.WithVersion(&version),
			)
			if err != nil {
				return errors.Wrap(ctx, err, op)
			}
			if rowsUpdated > 1 {
				// returning the error rolls back the transaction
				return errors.New(ctx, errors.MultipleRecords, op, "more than 1 resource would have been updated")
			}
			return nil
		},
	)
	if err != nil {
		// translate well-known constraint violations into friendlier errors
		if errors.IsUniqueError(err) {
			switch {
			case strings.Contains(err.Error(), `"alias_value_uq"`):
				return nil, db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg(fmt.Sprintf("for alias %s: alias value %q is already in use", a.PublicId, a.Value)))
			case strings.Contains(err.Error(), `"alias_target_scope_id_name_uq"`):
				return nil, db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg(fmt.Sprintf("in scope %s, the name %q is already in use", a.ScopeId, a.Name)))
			}
		}
		if strings.Contains(err.Error(), `violates foreign key constraint "target_fkey"`) {
			return nil, db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithCode(errors.NotFound), errors.WithMsg("target with specified destination id %q was not found", a.GetDestinationId()))
		}
		return nil, db.NoRowsAffected, errors.Wrap(ctx, err, op)
	}
	return returnedAlias, rowsUpdated, nil
}
// LookupAlias returns the Alias for id. Returns nil, nil if no
// Alias is found for id. opt is ignored.
func (r *Repository) LookupAlias(ctx context.Context, id string, opt ...Option) (*Alias, error) {
	const op = "target.(Repository).LookupAlias"
	if id == "" {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "no public id")
	}
	found := allocAlias()
	found.PublicId = id
	err := r.reader.LookupByPublicId(ctx, found)
	switch {
	case err == nil:
		return found, nil
	case errors.IsNotFoundError(err):
		// not-found is not an error for lookup; signal with nil, nil
		return nil, nil
	default:
		return nil, errors.Wrap(ctx, err, op, errors.WithMsg(fmt.Sprintf("failed for: %s", id)))
	}
}
// DeleteAlias deletes id from the repository returning a count of the
// number of records deleted. Deleting a non-existent id returns 0, nil.
// opt is ignored.
func (r *Repository) DeleteAlias(ctx context.Context, id string, opt ...Option) (int, error) {
	const op = "target.(Repository).DeleteAlias"
	if id == "" {
		return db.NoRowsAffected, errors.New(ctx, errors.InvalidParameter, op, "no public id")
	}
	// look the row up first so its scope id is available for the oplog
	// wrapper and metadata below
	a := allocAlias()
	a.PublicId = id
	if err := r.reader.LookupByPublicId(ctx, a); err != nil {
		if errors.IsNotFoundError(err) {
			return db.NoRowsAffected, nil
		}
		return db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg(fmt.Sprintf("failed for %s", id)))
	}
	if a.ScopeId == "" {
		return db.NoRowsAffected, errors.New(ctx, errors.InvalidParameter, op, "no scope id")
	}
	oplogWrapper, err := r.kms.GetWrapper(ctx, a.ScopeId, kms.KeyPurposeOplog)
	if err != nil {
		return db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg("unable to get oplog wrapper"))
	}
	metadata := newAliasMetadata(a, oplog.OpType_OP_TYPE_DELETE)
	var rowsDeleted int
	var deleteAlias *Alias
	_, err = r.writer.DoTx(
		ctx,
		db.StdRetryCnt,
		db.ExpBackoff{},
		func(_ db.Reader, w db.Writer) error {
			deleteAlias = a.clone()
			var err error
			rowsDeleted, err = w.Delete(
				ctx,
				deleteAlias,
				db.WithOplog(oplogWrapper, metadata),
			)
			if err != nil {
				return errors.Wrap(ctx, err, op)
			}
			if rowsDeleted > 1 {
				// returning the error rolls back the transaction
				return errors.New(ctx, errors.MultipleRecords, op, "more than 1 resource would have been deleted")
			}
			return nil
		},
	)
	if err != nil {
		return db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg(fmt.Sprintf("delete failed for %s", a.PublicId)))
	}
	return rowsDeleted, nil
}

@ -0,0 +1,160 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target
import (
"context"
"database/sql"
"fmt"
"time"
"github.com/hashicorp/boundary/internal/db"
"github.com/hashicorp/boundary/internal/db/timestamp"
"github.com/hashicorp/boundary/internal/errors"
)
// listAliases lists aliases in the given scopes, ordered by create time
// descending. Supported options: WithLimit, WithStartPageAfterItem.
// Returns the page of aliases and the database transaction timestamp.
func (r *Repository) listAliases(ctx context.Context, withScopeIds []string, opt ...Option) ([]*Alias, time.Time, error) {
	const op = "target.(Repository).listAliases"
	if len(withScopeIds) == 0 {
		return nil, time.Time{}, errors.New(ctx, errors.InvalidParameter, op, "missing scope id")
	}
	opts, err := getOpts(opt...)
	if err != nil {
		return nil, time.Time{}, errors.Wrap(ctx, err, op)
	}
	limit := r.defaultLimit
	switch {
	case opts.withLimit > 0:
		// non-zero signals an override of the default limit for the repo.
		limit = opts.withLimit
	case opts.withLimit < 0:
		return nil, time.Time{}, errors.New(ctx, errors.InvalidParameter, op, "limit must be non-negative")
	}
	var args []any
	whereClause := "scope_id in @scope_ids"
	args = append(args, sql.Named("scope_ids", withScopeIds))
	if opts.withStartPageAfterItem != nil {
		// keyset pagination: tuple comparison against the last item of
		// the previous page, matching the descending sort order below
		whereClause = fmt.Sprintf("(create_time, public_id) < (@last_item_create_time, @last_item_id) and %s", whereClause)
		args = append(args,
			sql.Named("last_item_create_time", opts.withStartPageAfterItem.GetCreateTime()),
			sql.Named("last_item_id", opts.withStartPageAfterItem.GetPublicId()),
		)
	}
	dbOpts := []db.Option{db.WithLimit(limit), db.WithOrder("create_time desc, public_id desc")}
	return r.queryAliases(ctx, whereClause, args, dbOpts...)
}
// listAliasesRefresh lists aliases in the given scopes that have been
// updated after the provided time, ordered by update time descending.
// Supported options:
//   - withLimit
//   - withStartPageAfterItem
func (r *Repository) listAliasesRefresh(ctx context.Context, updatedAfter time.Time, withScopeIds []string, opt ...Option) ([]*Alias, time.Time, error) {
	const op = "target.(Repository).listAliasesRefresh"
	switch {
	case updatedAfter.IsZero():
		return nil, time.Time{}, errors.New(ctx, errors.InvalidParameter, op, "missing updated after time")
	case len(withScopeIds) == 0:
		return nil, time.Time{}, errors.New(ctx, errors.InvalidParameter, op, "missing scope id")
	}
	opts, err := getOpts(opt...)
	if err != nil {
		return nil, time.Time{}, errors.Wrap(ctx, err, op)
	}
	limit := r.defaultLimit
	switch {
	case opts.withLimit > 0:
		// non-zero signals an override of the default limit for the repo.
		limit = opts.withLimit
	case opts.withLimit < 0:
		return nil, time.Time{}, errors.New(ctx, errors.InvalidParameter, op, "limit must be non-negative")
	}
	var args []any
	whereClause := "update_time > @updated_after_time and scope_id in @scope_ids"
	args = append(args,
		sql.Named("updated_after_time", timestamp.New(updatedAfter)),
		sql.Named("scope_ids", withScopeIds),
	)
	if opts.withStartPageAfterItem != nil {
		// keyset pagination keyed on update time to match the sort below
		whereClause = fmt.Sprintf("(update_time, public_id) < (@last_item_update_time, @last_item_id) and %s", whereClause)
		args = append(args,
			sql.Named("last_item_update_time", opts.withStartPageAfterItem.GetUpdateTime()),
			sql.Named("last_item_id", opts.withStartPageAfterItem.GetPublicId()),
		)
	}
	dbOpts := []db.Option{db.WithLimit(limit), db.WithOrder("update_time desc, public_id desc")}
	return r.queryAliases(ctx, whereClause, args, dbOpts...)
}
// queryAliases executes the provided where clause against the alias table
// and returns the matching aliases together with the database timestamp of
// the transaction in which the read occurred.
func (r *Repository) queryAliases(ctx context.Context, whereClause string, args []any, opt ...db.Option) ([]*Alias, time.Time, error) {
	const op = "target.(Repository).queryAliases"
	var aliases []*Alias
	var readTime time.Time
	// Read the rows and the transaction timestamp inside a single
	// transaction so the two are consistent with each other.
	txFn := func(rd db.Reader, _ db.Writer) error {
		var found []*Alias
		if err := rd.SearchWhere(ctx, &found, whereClause, args, opt...); err != nil {
			return errors.Wrap(ctx, err, op)
		}
		aliases = found
		now, err := rd.Now(ctx)
		if err != nil {
			return err
		}
		readTime = now
		return nil
	}
	if _, err := r.writer.DoTx(ctx, db.StdRetryCnt, db.ExpBackoff{}, txFn); err != nil {
		return nil, time.Time{}, err
	}
	return aliases, readTime, nil
}
// listDeletedIds lists the public IDs of any aliases deleted since the
// timestamp provided. It also returns the timestamp of the transaction in
// which the query ran, so callers can use it as the "since" value for a
// subsequent call without missing concurrent deletions.
func (r *Repository) listDeletedIds(ctx context.Context, since time.Time) ([]string, time.Time, error) {
	const op = "target.(Repository).listDeletedIds"
	var deletedAliases []*deletedAlias
	var transactionTimestamp time.Time
	// Named rd (not r) to avoid shadowing the repository receiver.
	if _, err := r.writer.DoTx(ctx, db.StdRetryCnt, db.ExpBackoff{}, func(rd db.Reader, _ db.Writer) error {
		if err := rd.SearchWhere(ctx, &deletedAliases, "delete_time >= ?", []any{since}); err != nil {
			return errors.Wrap(ctx, err, op, errors.WithMsg("failed to query deleted aliases"))
		}
		var err error
		transactionTimestamp, err = rd.Now(ctx)
		if err != nil {
			return errors.Wrap(ctx, err, op, errors.WithMsg("failed to get transaction timestamp"))
		}
		return nil
	}); err != nil {
		return nil, time.Time{}, err
	}
	var aliasIds []string
	for _, da := range deletedAliases {
		aliasIds = append(aliasIds, da.PublicId)
	}
	return aliasIds, transactionTimestamp, nil
}
// estimatedCount returns an estimate of the total number of items in the
// alias table, using the estimateCount query. The result is approximate
// and only as fresh as the database's table statistics.
func (r *Repository) estimatedCount(ctx context.Context) (int, error) {
	const op = "target.(Repository).estimatedCount"
	rows, err := r.reader.Query(ctx, estimateCount, nil)
	if err != nil {
		return 0, errors.Wrap(ctx, err, op, errors.WithMsg("failed to query total aliases"))
	}
	// Close the rows in all paths to avoid leaking the underlying cursor.
	defer rows.Close()
	var count int
	for rows.Next() {
		if err := r.reader.ScanRows(ctx, rows, &count); err != nil {
			return 0, errors.Wrap(ctx, err, op, errors.WithMsg("failed to query total aliases"))
		}
	}
	// Surface any error that terminated iteration early.
	if err := rows.Err(); err != nil {
		return 0, errors.Wrap(ctx, err, op, errors.WithMsg("failed to query total aliases"))
	}
	return count, nil
}

@ -0,0 +1,867 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target
import (
"context"
"crypto/rand"
"strings"
"testing"
"github.com/hashicorp/boundary/internal/alias/target/store"
"github.com/hashicorp/boundary/internal/db"
dbassert "github.com/hashicorp/boundary/internal/db/assert"
"github.com/hashicorp/boundary/internal/errors"
"github.com/hashicorp/boundary/internal/iam"
"github.com/hashicorp/boundary/internal/kms"
"github.com/hashicorp/boundary/internal/target/tcp"
"github.com/hashicorp/boundary/internal/types/scope"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestRepository_CreateAlias validates alias creation: input validation
// failures, successful creation with the various optional fields, and the
// uniqueness constraints on value (case-insensitive) and name.
func TestRepository_CreateAlias(t *testing.T) {
	ctx := context.Background()
	conn, _ := db.TestSetup(t, "postgres")
	rw := db.New(conn)
	wrapper := db.TestWrapper(t)
	kmsCache := kms.TestKms(t, conn, wrapper)
	require.NoError(t, kmsCache.CreateKeys(context.Background(), scope.Global.String(), kms.WithRandomReader(rand.Reader)))
	_, proj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper))
	tar := tcp.TestTarget(ctx, t, conn, proj.GetPublicId(), "test-target-1")
	tests := []struct {
		name        string
		in          *Alias
		opts        []Option
		want        *Alias
		errContains string
	}{
		{
			name:        "nil-alias",
			errContains: "nil Alias",
		},
		{
			name:        "nil-embedded-alias",
			in:          &Alias{},
			errContains: "nil embedded Alias",
		},
		{
			name: "no-value",
			in: &Alias{Alias: &store.Alias{
				ScopeId: "global",
			}},
			errContains: "no value",
		},
		{
			name: "no-scope",
			in: &Alias{Alias: &store.Alias{
				Value: "global",
			}},
			errContains: "no scope",
		},
		{
			name: "specified-public-id",
			in: &Alias{
				Alias: &store.Alias{
					PublicId: "alt_1234567890",
					ScopeId:  "global",
					Value:    "specified-public-id",
				},
			},
			errContains: "public id not empty",
		},
		{
			name: "valid-with-value",
			in: &Alias{
				Alias: &store.Alias{
					ScopeId: "global",
					Value:   "valid-with-value",
				},
			},
			want: &Alias{
				Alias: &store.Alias{
					ScopeId: "global",
					Value:   "valid-with-value",
				},
			},
		},
		{
			name: "valid-with-name",
			in: &Alias{
				Alias: &store.Alias{
					ScopeId: "global",
					Value:   "valid-with-name",
					Name:    "test-name-repo",
				},
			},
			want: &Alias{
				Alias: &store.Alias{
					ScopeId: "global",
					Value:   "valid-with-name",
					Name:    "test-name-repo",
				},
			},
		},
		{
			name: "valid-with-description",
			in: &Alias{
				Alias: &store.Alias{
					ScopeId:     "global",
					Value:       "valid-with-description",
					Description: ("test-description-repo"),
				},
			},
			want: &Alias{
				Alias: &store.Alias{
					ScopeId:     "global",
					Value:       "valid-with-description",
					Description: ("test-description-repo"),
				},
			},
		},
		{
			name: "valid-with-destination-id",
			in: &Alias{
				Alias: &store.Alias{
					ScopeId:       "global",
					Value:         "valid.with.destination.id",
					DestinationId: tar.GetPublicId(),
				},
			},
			want: &Alias{
				Alias: &store.Alias{
					ScopeId:       "global",
					Value:         "valid.with.destination.id",
					DestinationId: tar.GetPublicId(),
				},
			},
		},
		{
			name: "unknown-destination-id",
			in: &Alias{
				Alias: &store.Alias{
					ScopeId:       "global",
					Value:         "unknown.destination.id",
					DestinationId: "ttcp_unknownid",
				},
			},
			errContains: `target with specified destination id "ttcp_unknownid" was not found`,
		},
	}
	for _, tt := range tests {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			assert := assert.New(t)
			repo, err := NewRepository(ctx, rw, rw, kmsCache)
			assert.NoError(err)
			assert.NotNil(repo)
			got, err := repo.CreateAlias(ctx, tt.in, tt.opts...)
			if tt.errContains != "" {
				assert.ErrorContains(err, tt.errContains)
				assert.Nil(got)
				return
			}
			require.NoError(t, err)
			// CreateAlias must not mutate its input; the public id is only
			// set on the returned copy.
			assert.Empty(tt.in.PublicId)
			// Note: assert is an *assert.Assertions bound to t, so got is
			// passed directly (passing t here would assert t != nil and
			// treat got as a message argument).
			assert.NotNil(got)
			assertPublicId(t, "alt", got.PublicId)
			assert.NotSame(tt.in, got)
			assert.Equal(tt.want.Value, got.Value)
			assert.Equal(tt.want.Description, got.Description)
			assert.Equal(got.CreateTime, got.UpdateTime)
		})
	}
	t.Run("invalid-duplicate-aliases-case-insensitive", func(t *testing.T) {
		assert := assert.New(t)
		// Named testKms so the kms package is not shadowed.
		testKms := kms.TestKms(t, conn, wrapper)
		repo, err := NewRepository(ctx, rw, rw, testKms)
		assert.NoError(err)
		assert.NotNil(repo)
		in := &Alias{
			Alias: &store.Alias{
				ScopeId: "global",
				Value:   "test-value-repo",
			},
		}
		got, err := repo.CreateAlias(ctx, in)
		assert.NoError(err)
		require.NotNil(t, got)
		assertPublicId(t, "alt", got.PublicId)
		assert.NotSame(in, got)
		assert.Equal(in.Value, got.Value)
		assert.Equal(in.Description, got.Description)
		assert.Equal(got.CreateTime, got.UpdateTime)
		in.Value = "TEST-VALUE-REPO"
		got2, err := repo.CreateAlias(ctx, in)
		assert.Truef(errors.Match(errors.T(errors.NotUnique), err), "want err code: %v got err: %v", errors.NotUnique, err)
		assert.Nil(got2)
	})
	t.Run("invalid-duplicate-name", func(t *testing.T) {
		assert := assert.New(t)
		testKms := kms.TestKms(t, conn, wrapper)
		repo, err := NewRepository(ctx, rw, rw, testKms)
		assert.NoError(err)
		assert.NotNil(repo)
		in := &Alias{
			Alias: &store.Alias{
				ScopeId: "global",
				Value:   "test-value-name-1",
				Name:    "test-create-duplicate-name",
			},
		}
		got, err := repo.CreateAlias(ctx, in)
		assert.NoError(err)
		require.NotNil(t, got)
		assertPublicId(t, "alt", got.PublicId)
		assert.NotSame(in, got)
		assert.Equal(in.Value, got.Value)
		assert.Equal(in.Description, got.Description)
		assert.Equal(got.CreateTime, got.UpdateTime)
		// Change the value but keep the name so the second create collides
		// on the name; previously the same value was reused, which only
		// re-exercised the value uniqueness constraint.
		in.Value = "test-value-name-2"
		got2, err := repo.CreateAlias(ctx, in)
		assert.Truef(errors.Match(errors.T(errors.NotUnique), err), "want err code: %v got err: %v", errors.NotUnique, err)
		assert.Nil(got2)
	})
}
// assertPublicId asserts that actual is a well-formed public id carrying
// the provided prefix, e.g. "alt_1234567890".
func assertPublicId(t *testing.T, prefix, actual string) {
	t.Helper()
	assert.NotEmpty(t, actual)
	parts := strings.Split(actual, "_")
	// Report the actual underscore count: the previous message claimed
	// "got multiple" even when there were none.
	assert.Equalf(t, 2, len(parts), "want exactly one '_' in PublicId, got %d in %q", len(parts)-1, actual)
	assert.Equalf(t, prefix, parts[0], "PublicId want prefix: %q, got: %q in %q", prefix, parts[0], actual)
}
// TestRepository_UpdateAlias validates alias updates: input validation,
// field-mask handling, setting/changing/clearing each mutable field, and
// the uniqueness constraints on value and name.
func TestRepository_UpdateAlias(t *testing.T) {
	conn, _ := db.TestSetup(t, "postgres")
	rw := db.New(conn)
	wrapper := db.TestWrapper(t)
	ctx := context.Background()
	kmsCache := kms.TestKms(t, conn, wrapper)
	require.NoError(t, kmsCache.CreateKeys(context.Background(), scope.Global.String(), kms.WithRandomReader(rand.Reader)))
	_, proj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper))
	tar1 := tcp.TestTarget(ctx, t, conn, proj.GetPublicId(), "test-target-1")
	tar2 := tcp.TestTarget(ctx, t, conn, proj.GetPublicId(), "test-target-2")
	repo, err := NewRepository(ctx, rw, rw, kmsCache)
	assert.NoError(t, err)
	assert.NotNil(t, repo)

	// Mutator helpers applied to the created alias before calling
	// UpdateAlias; combine composes several of them.
	changeValue := func(s string) func(*Alias) *Alias {
		return func(c *Alias) *Alias {
			c.Value = s
			return c
		}
	}
	changeName := func(s string) func(*Alias) *Alias {
		return func(c *Alias) *Alias {
			c.Name = s
			return c
		}
	}
	changeDestinationId := func(s string) func(*Alias) *Alias {
		return func(c *Alias) *Alias {
			c.DestinationId = s
			return c
		}
	}
	changeHostId := func(s string) func(*Alias) *Alias {
		return func(c *Alias) *Alias {
			c.HostId = s
			return c
		}
	}
	changeDescription := func(s string) func(*Alias) *Alias {
		return func(c *Alias) *Alias {
			c.Description = s
			return c
		}
	}
	makeNil := func() func(*Alias) *Alias {
		return func(c *Alias) *Alias {
			return nil
		}
	}
	makeEmbeddedNil := func() func(*Alias) *Alias {
		return func(c *Alias) *Alias {
			return &Alias{}
		}
	}
	deletePublicId := func() func(*Alias) *Alias {
		return func(c *Alias) *Alias {
			c.PublicId = ""
			return c
		}
	}
	nonExistentPublicId := func() func(*Alias) *Alias {
		return func(c *Alias) *Alias {
			c.PublicId = "alt_OOOOOOOOOO"
			return c
		}
	}
	combine := func(fns ...func(c *Alias) *Alias) func(*Alias) *Alias {
		return func(c *Alias) *Alias {
			for _, fn := range fns {
				c = fn(c)
			}
			return c
		}
	}
	tests := []struct {
		name      string
		orig      *Alias
		chgFn     func(*Alias) *Alias
		masks     []string
		want      *Alias
		wantCount int
		wantIsErr errors.Code
	}{
		{
			name: "nil-alias",
			orig: &Alias{
				Alias: &store.Alias{
					Value: "nil-alias",
				},
			},
			chgFn:     makeNil(),
			masks:     []string{"Value", "Description"},
			wantIsErr: errors.InvalidParameter,
		},
		{
			name: "nil-embedded-alias",
			orig: &Alias{
				Alias: &store.Alias{
					Value: "nil-embedded-alias",
				},
			},
			chgFn:     makeEmbeddedNil(),
			masks:     []string{"Value", "Description"},
			wantIsErr: errors.InvalidParameter,
		},
		{
			name: "no-public-id",
			orig: &Alias{
				Alias: &store.Alias{
					Value: "no-public-id",
				},
			},
			chgFn:     deletePublicId(),
			masks:     []string{"Value", "Description"},
			wantIsErr: errors.InvalidParameter,
		},
		{
			name: "updating-non-existent-alias",
			orig: &Alias{
				Alias: &store.Alias{
					Value: "updating-non-existent-alias",
				},
			},
			chgFn:     combine(nonExistentPublicId(), changeValue("test-update-name-repo")),
			masks:     []string{"Value"},
			wantIsErr: errors.RecordNotFound,
		},
		{
			name: "empty-field-mask",
			orig: &Alias{
				Alias: &store.Alias{
					Value: "empty-field-mask",
				},
			},
			chgFn:     changeValue("test-update-name-repo"),
			wantIsErr: errors.EmptyFieldMask,
		},
		{
			name: "read-only-fields-in-field-mask",
			orig: &Alias{
				Alias: &store.Alias{
					Value: "read-only-fields-in-field-mask",
				},
			},
			chgFn:     changeValue("test-update-name-repo"),
			masks:     []string{"PublicId", "CreateTime", "UpdateTime", "ScopeId"},
			wantIsErr: errors.InvalidFieldMask,
		},
		{
			name: "unknown-field-in-field-mask",
			orig: &Alias{
				Alias: &store.Alias{
					Value: "unknown-field-in-field-mask",
				},
			},
			chgFn:     changeValue("test-update-name-repo"),
			masks:     []string{"Bilbo"},
			wantIsErr: errors.InvalidFieldMask,
		},
		{
			name: "change-value",
			orig: &Alias{
				Alias: &store.Alias{
					Value: "change-value",
				},
			},
			chgFn: changeValue("change-value-updated"),
			masks: []string{"Value"},
			want: &Alias{
				Alias: &store.Alias{
					Value: "change-value-updated",
				},
			},
			wantCount: 1,
		},
		{
			name: "change-name",
			orig: &Alias{
				Alias: &store.Alias{
					Value: "change-name",
					Name:  "change-name",
				},
			},
			chgFn: changeName("change-name-updated"),
			masks: []string{"Name"},
			want: &Alias{
				Alias: &store.Alias{
					Value: "change-name",
					Name:  "change-name-updated",
				},
			},
			wantCount: 1,
		},
		{
			name: "clear-name",
			orig: &Alias{
				Alias: &store.Alias{
					Value: "clear-name",
					Name:  "clear-name",
				},
			},
			chgFn: changeName(""),
			masks: []string{"Name"},
			want: &Alias{
				Alias: &store.Alias{
					Value: "clear-name",
				},
			},
			wantCount: 1,
		},
		{
			name: "change-destination-id",
			orig: &Alias{
				Alias: &store.Alias{
					Value:         "change-destination-id",
					DestinationId: tar1.GetPublicId(),
				},
			},
			chgFn: changeDestinationId(tar2.GetPublicId()),
			masks: []string{"DestinationId"},
			want: &Alias{
				Alias: &store.Alias{
					Value:         "change-destination-id",
					DestinationId: tar2.GetPublicId(),
				},
			},
			wantCount: 1,
		},
		{
			name: "change-destination-id-to-unknown",
			orig: &Alias{
				Alias: &store.Alias{
					Value:         "change-destination-id-to-unknown",
					DestinationId: tar1.GetPublicId(),
				},
			},
			chgFn:     changeDestinationId("ttcp_unknownid"),
			masks:     []string{"DestinationId"},
			wantIsErr: errors.NotFound,
		},
		{
			name: "delete-destination-id",
			orig: &Alias{
				Alias: &store.Alias{
					Value:         "delete-destination-id",
					DestinationId: tar1.GetPublicId(),
				},
			},
			// Clear the destination id; previously this set tar2's id,
			// which duplicated the "change-destination-id" case and never
			// exercised deletion.
			chgFn: changeDestinationId(""),
			masks: []string{"DestinationId"},
			want: &Alias{
				Alias: &store.Alias{
					Value: "delete-destination-id",
				},
			},
			wantCount: 1,
		},
		{
			name: "delete-destination-also-deletes-host-id",
			orig: &Alias{
				Alias: &store.Alias{
					Value:         "delete-destination-also-deletes-host-id",
					DestinationId: tar1.GetPublicId(),
					HostId:        "hst_1234567890",
				},
			},
			chgFn: changeDestinationId(""),
			masks: []string{"DestinationId"},
			want: &Alias{
				Alias: &store.Alias{
					Value: "delete-destination-also-deletes-host-id",
				},
			},
			wantCount: 1,
		},
		{
			name: "change-host-id",
			orig: &Alias{
				Alias: &store.Alias{
					Value:         "change-host-id",
					DestinationId: tar1.GetPublicId(),
					HostId:        "hst_1234567890",
				},
			},
			chgFn: changeHostId("hst_0987654321"),
			masks: []string{"HostId"},
			want: &Alias{
				Alias: &store.Alias{
					Value:         "change-host-id",
					DestinationId: tar1.GetPublicId(),
					HostId:        "hst_0987654321",
				},
			},
			wantCount: 1,
		},
		{
			name: "delete-host-id",
			orig: &Alias{
				Alias: &store.Alias{
					Value:         "delete-host-id",
					DestinationId: tar1.GetPublicId(),
					HostId:        "hst_1234567890",
				},
			},
			chgFn: changeHostId(""),
			masks: []string{"HostId"},
			want: &Alias{
				Alias: &store.Alias{
					Value:         "delete-host-id",
					DestinationId: tar1.GetPublicId(),
				},
			},
			wantCount: 1,
		},
		{
			name: "change-description",
			orig: &Alias{
				Alias: &store.Alias{
					Value:       "change-description",
					Description: "test-description-repo",
				},
			},
			chgFn: changeDescription("test-update-description-repo"),
			masks: []string{"Description"},
			want: &Alias{
				Alias: &store.Alias{
					Value:       "change-description",
					Description: "test-update-description-repo",
				},
			},
			wantCount: 1,
		},
		{
			name: "change-value-and-description",
			orig: &Alias{
				Alias: &store.Alias{
					Value:       "change-value-and-description",
					Description: "test-description-repo",
				},
			},
			chgFn: combine(changeDescription("test-update-description-repo"), changeValue("change-value-and-description-updated")),
			masks: []string{"Value", "Description"},
			want: &Alias{
				Alias: &store.Alias{
					Value:       "change-value-and-description-updated",
					Description: "test-update-description-repo",
				},
			},
			wantCount: 1,
		},
		{
			name: "delete-value",
			orig: &Alias{
				Alias: &store.Alias{
					Value: "delete-value",
				},
			},
			masks:     []string{"Value"},
			chgFn:     combine(changeDescription("test-update-description-repo"), changeValue("")),
			wantIsErr: errors.InvalidParameter,
		},
		{
			name: "delete-description",
			orig: &Alias{
				Alias: &store.Alias{
					Value:       "delete-description",
					Description: "test-description-repo",
				},
			},
			masks: []string{"Description"},
			chgFn: combine(changeDescription(""), changeValue("delete-description-updated")),
			want: &Alias{
				Alias: &store.Alias{
					Value: "delete-description",
				},
			},
			wantCount: 1,
		},
		{
			name: "do-not-delete-value",
			orig: &Alias{
				Alias: &store.Alias{
					Value:       "do-not-delete-value",
					Description: "test-description-repo",
				},
			},
			masks: []string{"Description"},
			chgFn: combine(changeDescription("test-update-description-repo"), changeValue("")),
			want: &Alias{
				Alias: &store.Alias{
					Value:       "do-not-delete-value",
					Description: "test-update-description-repo",
				},
			},
			wantCount: 1,
		},
		{
			name: "do-not-delete-description",
			orig: &Alias{
				Alias: &store.Alias{
					Value:       "do-not-delete-description",
					Description: "test-description-repo",
				},
			},
			masks: []string{"Value"},
			chgFn: combine(changeDescription(""), changeValue("do-not-delete-description-updated")),
			want: &Alias{
				Alias: &store.Alias{
					Value:       "do-not-delete-description-updated",
					Description: "test-description-repo",
				},
			},
			wantCount: 1,
		},
	}
	for _, tt := range tests {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			tt.orig.ScopeId = "global"
			orig, err := repo.CreateAlias(ctx, tt.orig)
			require.NoError(t, err)
			require.NotNil(t, orig)
			if tt.chgFn != nil {
				orig = tt.chgFn(orig)
			}
			got, gotCount, err := repo.UpdateAlias(ctx, orig, 1, tt.masks)
			if tt.wantIsErr != 0 {
				assert.Truef(t, errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err)
				assert.Equal(t, tt.wantCount, gotCount, "row count")
				assert.Nil(t, got)
				return
			}
			assert.NoError(t, err)
			assert.Empty(t, tt.orig.PublicId)
			assert.NotNil(t, got)
			assertPublicId(t, "alt", got.PublicId)
			assert.Equal(t, tt.wantCount, gotCount, "row count")
			assert.NotSame(t, tt.orig, got)
			assert.Equal(t, tt.orig.ScopeId, got.ScopeId)
			underlyingDB, err := conn.SqlDB(ctx)
			require.NoError(t, err)
			dbassert := dbassert.New(t, underlyingDB)
			// NOTE(review): these early returns mean a case with an empty
			// want.Value skips the Description assertions entirely.
			if tt.want.Value == "" {
				dbassert.IsNull(got, "value")
				return
			}
			assert.Equal(t, tt.want.Value, got.Value)
			if tt.want.Description == "" {
				dbassert.IsNull(got, "description")
				return
			}
			assert.Equal(t, tt.want.Description, got.Description)
		})
	}
	t.Run("invalid-duplicate-values", func(t *testing.T) {
		value := "test-dup-value"
		c1 := TestAlias(t, db.New(conn), "test")
		c1.Value = value
		got1, gotCount1, err := repo.UpdateAlias(context.Background(), c1, 1, []string{"value"})
		assert.NoError(t, err)
		assert.NotNil(t, got1)
		assert.Equal(t, value, got1.Value)
		assert.Equal(t, 1, gotCount1, "row count")
		c2 := TestAlias(t, db.New(conn), "test2")
		c2.Value = value
		got2, gotCount2, err := repo.UpdateAlias(context.Background(), c2, 1, []string{"value"})
		assert.Truef(t, errors.Match(errors.T(errors.NotUnique), err), "want err code: %v got err: %v", errors.NotUnique, err)
		assert.Nil(t, got2)
		assert.Equal(t, db.NoRowsAffected, gotCount2, "row count")
	})
	t.Run("invalid-duplicate-name", func(t *testing.T) {
		name := "test-dup-name"
		c1 := TestAlias(t, db.New(conn), "duplicate.name.test")
		c1.Name = name
		got1, gotCount1, err := repo.UpdateAlias(context.Background(), c1, 1, []string{"name"})
		assert.NoError(t, err)
		assert.NotNil(t, got1)
		assert.Equal(t, name, got1.Name)
		assert.Equal(t, 1, gotCount1, "row count")
		c2 := TestAlias(t, db.New(conn), "duplicate.name.test2")
		c2.Name = name
		got2, gotCount2, err := repo.UpdateAlias(context.Background(), c2, 1, []string{"name"})
		assert.Truef(t, errors.Match(errors.T(errors.NotUnique), err), "want err code: %v got err: %v", errors.NotUnique, err)
		assert.Nil(t, got2)
		assert.Equal(t, db.NoRowsAffected, gotCount2, "row count")
	})
}
// TestRepository_LookupAlias validates looking up aliases by public id:
// an existing alias, a well-formed but unknown id, and an empty id.
func TestRepository_LookupAlias(t *testing.T) {
	ctx := context.Background()
	conn, _ := db.TestSetup(t, "postgres")
	rw := db.New(conn)
	wrapper := db.TestWrapper(t)
	al := TestAlias(t, rw, "one")
	// A syntactically valid alias id that does not exist in the database.
	badId, err := newAliasId(ctx)
	assert.NoError(t, err)
	assert.NotNil(t, badId)

	cases := []struct {
		name    string
		id      string
		want    *Alias
		wantErr errors.Code
	}{
		{name: "found", id: al.GetPublicId(), want: al},
		{name: "not-found", id: badId},
		{name: "bad-public-id", id: "", wantErr: errors.InvalidParameter},
	}
	for _, tc := range cases {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			testKms := kms.TestKms(t, conn, wrapper)
			repo, err := NewRepository(ctx, rw, rw, testKms)
			assert.NoError(t, err)
			assert.NotNil(t, repo)
			got, err := repo.LookupAlias(ctx, tc.id)
			if tc.wantErr != 0 {
				assert.Truef(t, errors.Match(errors.T(tc.wantErr), err), "want err: %q got: %q", tc.wantErr, err)
				return
			}
			assert.NoError(t, err)
			if tc.want == nil {
				assert.Nil(t, got)
			} else {
				assert.NotNil(t, got)
				assert.Equal(t, got, tc.want)
			}
		})
	}
}
// TestRepository_DeleteAlias validates deleting aliases by public id:
// an existing alias, a well-formed but unknown id, and an empty id.
func TestRepository_DeleteAlias(t *testing.T) {
	ctx := context.Background()
	conn, _ := db.TestSetup(t, "postgres")
	rw := db.New(conn)
	wrapper := db.TestWrapper(t)
	kmsCache := kms.TestKms(t, conn, wrapper)
	require.NoError(t, kmsCache.CreateKeys(context.Background(), scope.Global.String(), kms.WithRandomReader(rand.Reader)))
	repo, err := NewRepository(ctx, rw, rw, kmsCache)
	assert.NoError(t, err)
	require.NotNil(t, repo)

	al := TestAlias(t, rw, "deleted.alias")
	// A syntactically valid alias id that does not exist in the database.
	badId, err := newAliasId(ctx)
	assert.NoError(t, err)
	assert.NotNil(t, badId)

	cases := []struct {
		name    string
		id      string
		want    int
		wantErr errors.Code
	}{
		{name: "found", id: al.GetPublicId(), want: 1},
		{name: "not-found", id: badId, want: 0},
		{name: "bad-public-id", id: "", want: 0, wantErr: errors.InvalidParameter},
	}
	for _, tc := range cases {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			got, err := repo.DeleteAlias(ctx, tc.id)
			if tc.wantErr != 0 {
				assert.Truef(t, errors.Match(errors.T(tc.wantErr), err), "want err: %q got: %q", tc.wantErr, err)
				return
			}
			require.NoError(t, err)
			assert.Equal(t, tc.want, got, "row count")
		})
	}
}

@ -0,0 +1,123 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target
import (
"context"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/hashicorp/boundary/internal/db"
"github.com/hashicorp/boundary/internal/errors"
"github.com/hashicorp/boundary/internal/kms"
)
// TestRepository_New validates the repository constructor: nil-argument
// validation and application of the WithLimit option, with db.DefaultLimit
// used when no limit is provided.
func TestRepository_New(t *testing.T) {
	ctx := context.Background()
	conn, _ := db.TestSetup(t, "postgres")
	rw := db.New(conn)
	wrapper := db.TestWrapper(t)
	kmsCache := kms.TestKms(t, conn, wrapper)

	type args struct {
		r    db.Reader
		w    db.Writer
		kms  *kms.Kms
		opts []Option
	}
	cases := []struct {
		name      string
		args      args
		want      *Repository
		wantIsErr errors.Code
	}{
		{
			name: "valid",
			args: args{r: rw, w: rw, kms: kmsCache},
			want: &Repository{reader: rw, writer: rw, kms: kmsCache, defaultLimit: db.DefaultLimit},
		},
		{
			name: "valid-with-limit",
			args: args{r: rw, w: rw, kms: kmsCache, opts: []Option{WithLimit(5)}},
			want: &Repository{reader: rw, writer: rw, kms: kmsCache, defaultLimit: 5},
		},
		{
			name:      "nil-reader",
			args:      args{w: rw, kms: kmsCache},
			wantIsErr: errors.InvalidParameter,
		},
		{
			name:      "nil-writer",
			args:      args{r: rw, kms: kmsCache},
			wantIsErr: errors.InvalidParameter,
		},
		{
			name:      "nil-kms",
			args:      args{r: rw, w: rw},
			wantIsErr: errors.InvalidParameter,
		},
		{
			name:      "all-nils",
			wantIsErr: errors.InvalidParameter,
		},
	}
	for _, tc := range cases {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			got, err := NewRepository(ctx, tc.args.r, tc.args.w, tc.args.kms, tc.args.opts...)
			if tc.wantIsErr != 0 {
				assert.Truef(t, errors.Match(errors.T(tc.wantIsErr), err), "want err: %q got: %q", tc.wantIsErr, err)
				assert.Nil(t, got)
				return
			}
			assert.NoError(t, err)
			require.NotNil(t, got)
			assert.Equal(t, tc.want, got)
		})
	}
}

@ -0,0 +1,53 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target
import (
"context"
"time"
"github.com/hashicorp/boundary/internal/errors"
"github.com/hashicorp/boundary/internal/pagination"
)
// ListAliases lists up to page size aliases, filtering out entries that
// do not pass the filter item function. It will automatically request
// more aliases from the database, at page size chunks, to fill the page.
// It returns a new list token used to continue pagination or refresh items.
// Aliases are ordered by create time descending (most recently created first).
func ListAliases(
	ctx context.Context,
	grantsHash []byte,
	pageSize int,
	filterItemFn pagination.ListFilterFunc[*Alias],
	repo *Repository,
	withScopeIds []string,
) (*pagination.ListResponse[*Alias], error) {
	const op = "target.ListAliases"
	// Fail fast on invalid arguments.
	if len(grantsHash) == 0 {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing grants hash")
	}
	if pageSize < 1 {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "page size must be at least 1")
	}
	if filterItemFn == nil {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing filter item callback")
	}
	if repo == nil {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing repo")
	}
	if len(withScopeIds) == 0 {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing scope ids")
	}
	// listItemsFn fetches a single database page, resuming after the last
	// item of the previous page when one is provided.
	listItemsFn := func(ctx context.Context, lastPageItem *Alias, limit int) ([]*Alias, time.Time, error) {
		listOpts := []Option{WithLimit(limit)}
		if lastPageItem != nil {
			listOpts = append(listOpts, WithStartPageAfterItem(lastPageItem))
		}
		return repo.listAliases(ctx, withScopeIds, listOpts...)
	}
	return pagination.List(ctx, grantsHash, pageSize, filterItemFn, listItemsFn, repo.estimatedCount)
}

@ -0,0 +1,616 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target_test
import (
"context"
"crypto/rand"
"fmt"
"slices"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/hashicorp/boundary/globals"
"github.com/hashicorp/boundary/internal/alias/target"
"github.com/hashicorp/boundary/internal/alias/target/store"
"github.com/hashicorp/boundary/internal/db"
"github.com/hashicorp/boundary/internal/db/timestamp"
"github.com/hashicorp/boundary/internal/kms"
"github.com/hashicorp/boundary/internal/listtoken"
"github.com/hashicorp/boundary/internal/types/resource"
"github.com/hashicorp/boundary/internal/types/scope"
"github.com/stretchr/testify/require"
"google.golang.org/protobuf/types/known/timestamppb"
)
// TestService_ListAliases exercises the alias list service functions
// (ListAliases, ListAliasesPage, ListAliasesRefresh and
// ListAliasesRefreshPage) end to end against a test postgres database:
// input validation for every function, the initial pagination phase, the
// refresh phase, aggressive filtering, and deleted-id reporting.
func TestService_ListAliases(t *testing.T) {
	// NOTE(review): despite the name, this is time.Now(); it is only used as a
	// plausible timestamp when constructing list tokens in the validation
	// subtests below.
	fiveDaysAgo := time.Now()
	// Set database read timeout to avoid duplicates in response
	oldReadTimeout := globals.RefreshReadLookbackDuration
	globals.RefreshReadLookbackDuration = 0
	t.Cleanup(func() {
		globals.RefreshReadLookbackDuration = oldReadTimeout
	})
	ctx := context.Background()
	conn, _ := db.TestSetup(t, "postgres")
	sqlDB, err := conn.SqlDB(context.Background())
	require.NoError(t, err)
	rw := db.New(conn)
	wrapper := db.TestWrapper(t)
	kmsCache := kms.TestKms(t, conn, wrapper)
	require.NoError(t, kmsCache.CreateKeys(context.Background(), scope.Global.String(), kms.WithRandomReader(rand.Reader)))
	relevantScopes := []string{"global"}
	// Seed five aliases to page over.
	var allResources []*target.Alias
	for i := 0; i < 5; i++ {
		r := target.TestAlias(t, rw, fmt.Sprintf("test%d.alias", i))
		allResources = append(allResources, r)
	}
	repo, err := target.NewRepository(ctx, rw, rw, kmsCache)
	require.NoError(t, err)
	// Reverse since we read items in descending order (newest first)
	slices.Reverse(allResources)
	// Run analyze to update postgres estimates
	_, err = sqlDB.ExecContext(ctx, "analyze")
	require.NoError(t, err)
	// Compare only exported fields of the alias/protobuf types in cmp.Diff.
	cmpIgnoreUnexportedOpts := cmpopts.IgnoreUnexported(target.Alias{}, store.Alias{}, timestamp.Timestamp{}, timestamppb.Timestamp{})
	// Parameter validation for the initial-pagination entry point.
	t.Run("List validation", func(t *testing.T) {
		t.Parallel()
		t.Run("missing grants hash", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			_, err := target.ListAliases(ctx, nil, 1, filterFunc, repo, relevantScopes)
			require.ErrorContains(t, err, "missing grants hash")
		})
		t.Run("zero page size", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			_, err := target.ListAliases(ctx, []byte("some hash"), 0, filterFunc, repo, relevantScopes)
			require.ErrorContains(t, err, "page size must be at least 1")
		})
		t.Run("negative page size", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			_, err := target.ListAliases(ctx, []byte("some hash"), -1, filterFunc, repo, relevantScopes)
			require.ErrorContains(t, err, "page size must be at least 1")
		})
		t.Run("nil filter func", func(t *testing.T) {
			t.Parallel()
			_, err := target.ListAliases(ctx, []byte("some hash"), 1, nil, repo, relevantScopes)
			require.ErrorContains(t, err, "missing filter item callback")
		})
		t.Run("nil repo", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			_, err := target.ListAliases(ctx, []byte("some hash"), 1, filterFunc, nil, relevantScopes)
			require.ErrorContains(t, err, "missing repo")
		})
		t.Run("missing scope ids", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			_, err := target.ListAliases(ctx, []byte("some hash"), 1, filterFunc, repo, nil)
			require.ErrorContains(t, err, "missing scope ids")
		})
	})
	// Parameter and token validation for the pagination continuation
	// entry point; it requires a pagination-subtype token.
	t.Run("ListPage validation", func(t *testing.T) {
		t.Parallel()
		t.Run("missing grants hash", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewPagination(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), "some-id", fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesPage(ctx, nil, 1, filterFunc, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "missing grants hash")
		})
		t.Run("zero page size", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewPagination(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), "some-id", fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesPage(ctx, []byte("some hash"), 0, filterFunc, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "page size must be at least 1")
		})
		t.Run("negative page size", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewPagination(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), "some-id", fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesPage(ctx, []byte("some hash"), -1, filterFunc, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "page size must be at least 1")
		})
		t.Run("nil filter func", func(t *testing.T) {
			t.Parallel()
			tok, err := listtoken.NewPagination(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), "some-id", fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesPage(ctx, []byte("some hash"), 1, nil, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "missing filter item callback")
		})
		t.Run("nil token", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			_, err = target.ListAliasesPage(ctx, []byte("some hash"), 1, filterFunc, nil, repo, relevantScopes)
			require.ErrorContains(t, err, "missing token")
		})
		t.Run("wrong token type", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			// A start-refresh token is the wrong subtype for ListAliasesPage.
			tok, err := listtoken.NewStartRefresh(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), fiveDaysAgo, fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesPage(ctx, []byte("some hash"), 1, filterFunc, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "token did not have a pagination token component")
		})
		t.Run("nil repo", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewPagination(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), "some-id", fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesPage(ctx, []byte("some hash"), 1, filterFunc, tok, nil, relevantScopes)
			require.ErrorContains(t, err, "missing repo")
		})
		t.Run("missing scope ids", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewPagination(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), "some-id", fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesPage(ctx, []byte("some hash"), 1, filterFunc, tok, repo, nil)
			require.ErrorContains(t, err, "missing scope ids")
		})
		t.Run("wrong token resource type", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewPagination(ctx, fiveDaysAgo, resource.Target, []byte("some hash"), "some-id", fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesPage(ctx, []byte("some hash"), 1, filterFunc, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "token did not have a alias resource type")
		})
	})
	// Parameter and token validation for the start-of-refresh entry point;
	// it requires a start-refresh-subtype token.
	t.Run("ListRefresh validation", func(t *testing.T) {
		t.Parallel()
		t.Run("missing grants hash", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewStartRefresh(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), fiveDaysAgo, fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesRefresh(ctx, nil, 1, filterFunc, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "missing grants hash")
		})
		t.Run("zero page size", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewStartRefresh(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), fiveDaysAgo, fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesRefresh(ctx, []byte("some hash"), 0, filterFunc, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "page size must be at least 1")
		})
		t.Run("negative page size", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewStartRefresh(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), fiveDaysAgo, fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesRefresh(ctx, []byte("some hash"), -1, filterFunc, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "page size must be at least 1")
		})
		t.Run("nil filter func", func(t *testing.T) {
			t.Parallel()
			tok, err := listtoken.NewStartRefresh(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), fiveDaysAgo, fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesRefresh(ctx, []byte("some hash"), 1, nil, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "missing filter item callback")
		})
		t.Run("nil token", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			_, err = target.ListAliasesRefresh(ctx, []byte("some hash"), 1, filterFunc, nil, repo, relevantScopes)
			require.ErrorContains(t, err, "missing token")
		})
		t.Run("wrong token type", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			// A pagination token is the wrong subtype for ListAliasesRefresh.
			tok, err := listtoken.NewPagination(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), "some-id", fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesRefresh(ctx, []byte("some hash"), 1, filterFunc, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "token did not have a start-refresh token component")
		})
		t.Run("nil repo", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewStartRefresh(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), fiveDaysAgo, fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesRefresh(ctx, []byte("some hash"), 1, filterFunc, tok, nil, relevantScopes)
			require.ErrorContains(t, err, "missing repo")
		})
		t.Run("missing scope ids", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewStartRefresh(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), fiveDaysAgo, fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesRefresh(ctx, []byte("some hash"), 1, filterFunc, tok, repo, nil)
			require.ErrorContains(t, err, "missing scope ids")
		})
		t.Run("wrong token resource type", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewStartRefresh(ctx, fiveDaysAgo, resource.Target, []byte("some hash"), fiveDaysAgo, fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesRefresh(ctx, []byte("some hash"), 1, filterFunc, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "token did not have a alias resource type")
		})
	})
	// Parameter and token validation for the refresh continuation entry
	// point; it requires a refresh-subtype token.
	t.Run("ListRefreshPage validation", func(t *testing.T) {
		t.Parallel()
		t.Run("missing grants hash", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewRefresh(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), fiveDaysAgo, fiveDaysAgo, fiveDaysAgo, "some other id", fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesRefreshPage(ctx, nil, 1, filterFunc, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "missing grants hash")
		})
		t.Run("zero page size", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewRefresh(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), fiveDaysAgo, fiveDaysAgo, fiveDaysAgo, "some other id", fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesRefreshPage(ctx, []byte("some hash"), 0, filterFunc, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "page size must be at least 1")
		})
		t.Run("negative page size", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewRefresh(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), fiveDaysAgo, fiveDaysAgo, fiveDaysAgo, "some other id", fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesRefreshPage(ctx, []byte("some hash"), -1, filterFunc, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "page size must be at least 1")
		})
		t.Run("nil filter func", func(t *testing.T) {
			t.Parallel()
			tok, err := listtoken.NewRefresh(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), fiveDaysAgo, fiveDaysAgo, fiveDaysAgo, "some other id", fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesRefreshPage(ctx, []byte("some hash"), 1, nil, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "missing filter item callback")
		})
		t.Run("nil token", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			_, err = target.ListAliasesRefreshPage(ctx, []byte("some hash"), 1, filterFunc, nil, repo, relevantScopes)
			require.ErrorContains(t, err, "missing token")
		})
		t.Run("wrong token type", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			// A pagination token is the wrong subtype for ListAliasesRefreshPage.
			tok, err := listtoken.NewPagination(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), "some-id", fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesRefreshPage(ctx, []byte("some hash"), 1, filterFunc, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "token did not have a refresh token component")
		})
		t.Run("nil repo", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewRefresh(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), fiveDaysAgo, fiveDaysAgo, fiveDaysAgo, "some other id", fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesRefreshPage(ctx, []byte("some hash"), 1, filterFunc, tok, nil, relevantScopes)
			require.ErrorContains(t, err, "missing repo")
		})
		t.Run("missing scope ids", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewRefresh(ctx, fiveDaysAgo, resource.Alias, []byte("some hash"), fiveDaysAgo, fiveDaysAgo, fiveDaysAgo, "some other id", fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesRefreshPage(ctx, []byte("some hash"), 1, filterFunc, tok, repo, nil)
			require.ErrorContains(t, err, "missing scope ids")
		})
		t.Run("wrong token resource type", func(t *testing.T) {
			t.Parallel()
			filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
				return true, nil
			}
			tok, err := listtoken.NewRefresh(ctx, fiveDaysAgo, resource.Target, []byte("some hash"), fiveDaysAgo, fiveDaysAgo, fiveDaysAgo, "some other id", fiveDaysAgo)
			require.NoError(t, err)
			_, err = target.ListAliasesRefreshPage(ctx, []byte("some hash"), 1, filterFunc, tok, repo, relevantScopes)
			require.ErrorContains(t, err, "token did not have a alias resource type")
		})
	})
	// Page through all five seeded aliases one at a time, then run the
	// refresh phase and observe newly created aliases come back in
	// update-time order.
	t.Run("simple pagination", func(t *testing.T) {
		filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
			return true, nil
		}
		resp, err := target.ListAliases(ctx, []byte("some hash"), 1, filterFunc, repo, relevantScopes)
		require.NoError(t, err)
		require.NotNil(t, resp.ListToken)
		require.Equal(t, resp.ListToken.GrantsHash, []byte("some hash"))
		require.False(t, resp.CompleteListing)
		require.Equal(t, resp.EstimatedItemCount, 5)
		require.Empty(t, resp.DeletedIds)
		require.Len(t, resp.Items, 1)
		require.Empty(t, cmp.Diff(resp.Items[0], allResources[0], cmpIgnoreUnexportedOpts))
		resp2, err := target.ListAliasesPage(ctx, []byte("some hash"), 1, filterFunc, resp.ListToken, repo, relevantScopes)
		require.NoError(t, err)
		require.Equal(t, resp2.ListToken.GrantsHash, []byte("some hash"))
		require.False(t, resp2.CompleteListing)
		require.Equal(t, resp2.EstimatedItemCount, 5)
		require.Empty(t, resp2.DeletedIds)
		require.Len(t, resp2.Items, 1)
		require.Empty(t, cmp.Diff(resp2.Items[0], allResources[1], cmpIgnoreUnexportedOpts))
		resp3, err := target.ListAliasesPage(ctx, []byte("some hash"), 1, filterFunc, resp2.ListToken, repo, relevantScopes)
		require.NoError(t, err)
		require.Equal(t, resp3.ListToken.GrantsHash, []byte("some hash"))
		require.False(t, resp3.CompleteListing)
		require.Equal(t, resp3.EstimatedItemCount, 5)
		require.Empty(t, resp3.DeletedIds)
		require.Len(t, resp3.Items, 1)
		require.Empty(t, cmp.Diff(resp3.Items[0], allResources[2], cmpIgnoreUnexportedOpts))
		resp4, err := target.ListAliasesPage(ctx, []byte("some hash"), 1, filterFunc, resp3.ListToken, repo, relevantScopes)
		require.NoError(t, err)
		require.Equal(t, resp4.ListToken.GrantsHash, []byte("some hash"))
		require.False(t, resp4.CompleteListing)
		require.Equal(t, resp4.EstimatedItemCount, 5)
		require.Empty(t, resp4.DeletedIds)
		require.Len(t, resp4.Items, 1)
		require.Empty(t, cmp.Diff(resp4.Items[0], allResources[3], cmpIgnoreUnexportedOpts))
		resp5, err := target.ListAliasesPage(ctx, []byte("some hash"), 1, filterFunc, resp4.ListToken, repo, relevantScopes)
		require.NoError(t, err)
		require.Equal(t, resp5.ListToken.GrantsHash, []byte("some hash"))
		require.True(t, resp5.CompleteListing)
		require.Equal(t, resp5.EstimatedItemCount, 5)
		require.Empty(t, resp5.DeletedIds)
		require.Len(t, resp5.Items, 1)
		require.Empty(t, cmp.Diff(resp5.Items[0], allResources[4], cmpIgnoreUnexportedOpts))
		// Finished initial pagination phase, request refresh
		// Expect no results.
		resp6, err := target.ListAliasesRefresh(ctx, []byte("some hash"), 1, filterFunc, resp5.ListToken, repo, relevantScopes)
		require.NoError(t, err)
		require.Equal(t, resp6.ListToken.GrantsHash, []byte("some hash"))
		require.True(t, resp6.CompleteListing)
		require.Equal(t, resp6.EstimatedItemCount, 5)
		require.Empty(t, resp6.DeletedIds)
		require.Empty(t, resp6.Items)
		// Create some new aliases
		newR1 := target.TestAlias(t, rw, "first.new.alias")
		newR2 := target.TestAlias(t, rw, "second.new.alias")
		t.Cleanup(func() {
			_, err = repo.DeleteAlias(ctx, newR1.GetPublicId())
			require.NoError(t, err)
			_, err = repo.DeleteAlias(ctx, newR2.GetPublicId())
			require.NoError(t, err)
			// Run analyze to update count estimate
			_, err = sqlDB.ExecContext(ctx, "analyze")
			require.NoError(t, err)
		})
		// Run analyze to update count estimate
		_, err = sqlDB.ExecContext(ctx, "analyze")
		require.NoError(t, err)
		// Refresh again, should get newR2
		resp7, err := target.ListAliasesRefresh(ctx, []byte("some hash"), 1, filterFunc, resp6.ListToken, repo, relevantScopes)
		require.NoError(t, err)
		require.Equal(t, resp7.ListToken.GrantsHash, []byte("some hash"))
		require.False(t, resp7.CompleteListing)
		require.Equal(t, resp7.EstimatedItemCount, 7)
		require.Empty(t, resp7.DeletedIds)
		require.Len(t, resp7.Items, 1)
		require.Empty(t, cmp.Diff(resp7.Items[0], newR2, cmpIgnoreUnexportedOpts))
		// Refresh again, should get newR1
		resp8, err := target.ListAliasesRefreshPage(ctx, []byte("some hash"), 1, filterFunc, resp7.ListToken, repo, relevantScopes)
		require.NoError(t, err)
		require.Equal(t, resp8.ListToken.GrantsHash, []byte("some hash"))
		require.True(t, resp8.CompleteListing)
		require.Equal(t, resp8.EstimatedItemCount, 7)
		require.Empty(t, resp8.DeletedIds)
		require.Len(t, resp8.Items, 1)
		require.Empty(t, cmp.Diff(resp8.Items[0], newR1, cmpIgnoreUnexportedOpts))
		// Refresh again, should get no results
		resp9, err := target.ListAliasesRefresh(ctx, []byte("some hash"), 1, filterFunc, resp8.ListToken, repo, relevantScopes)
		require.NoError(t, err)
		require.Equal(t, resp9.ListToken.GrantsHash, []byte("some hash"))
		require.True(t, resp9.CompleteListing)
		require.Equal(t, resp9.EstimatedItemCount, 7)
		require.Empty(t, resp9.DeletedIds)
		require.Empty(t, resp9.Items)
	})
	// The filter func drops most rows; the service must keep fetching
	// database pages until it can fill the requested page size.
	t.Run("simple pagination with aggressive filtering", func(t *testing.T) {
		filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
			return r.GetPublicId() == allResources[1].GetPublicId() ||
				r.GetPublicId() == allResources[len(allResources)-1].GetPublicId(), nil
		}
		resp, err := target.ListAliases(ctx, []byte("some hash"), 1, filterFunc, repo, relevantScopes)
		require.NoError(t, err)
		require.NotNil(t, resp.ListToken)
		require.Equal(t, resp.ListToken.GrantsHash, []byte("some hash"))
		require.False(t, resp.CompleteListing)
		require.Equal(t, resp.EstimatedItemCount, 5)
		require.Empty(t, resp.DeletedIds)
		require.Len(t, resp.Items, 1)
		require.Empty(t, cmp.Diff(resp.Items[0], allResources[1], cmpIgnoreUnexportedOpts))
		resp2, err := target.ListAliasesPage(ctx, []byte("some hash"), 1, filterFunc, resp.ListToken, repo, relevantScopes)
		require.NoError(t, err)
		require.NotNil(t, resp2.ListToken)
		require.Equal(t, resp2.ListToken.GrantsHash, []byte("some hash"))
		require.True(t, resp2.CompleteListing)
		require.Equal(t, resp2.EstimatedItemCount, 5)
		require.Empty(t, resp2.DeletedIds)
		require.Len(t, resp2.Items, 1)
		require.Empty(t, cmp.Diff(resp2.Items[0], allResources[len(allResources)-1], cmpIgnoreUnexportedOpts))
		// request a refresh, nothing should be returned
		resp3, err := target.ListAliasesRefresh(ctx, []byte("some hash"), 1, filterFunc, resp.ListToken, repo, relevantScopes)
		require.NoError(t, err)
		require.Equal(t, resp3.ListToken.GrantsHash, []byte("some hash"))
		require.True(t, resp3.CompleteListing)
		require.Equal(t, resp3.EstimatedItemCount, 5)
		require.Empty(t, resp3.DeletedIds)
		require.Empty(t, resp3.Items)
		// Create some new aliases
		newR1 := target.TestAlias(t, rw, "new.alias.one")
		newR2 := target.TestAlias(t, rw, "new.alias.two")
		newR3 := target.TestAlias(t, rw, "new.alias.three")
		newR4 := target.TestAlias(t, rw, "new.alias.four")
		// Run analyze to update count estimate
		_, err = sqlDB.ExecContext(ctx, "analyze")
		require.NoError(t, err)
		t.Cleanup(func() {
			_, err = repo.DeleteAlias(ctx, newR1.GetPublicId())
			require.NoError(t, err)
			_, err = repo.DeleteAlias(ctx, newR2.GetPublicId())
			require.NoError(t, err)
			_, err = repo.DeleteAlias(ctx, newR3.GetPublicId())
			require.NoError(t, err)
			_, err = repo.DeleteAlias(ctx, newR4.GetPublicId())
			require.NoError(t, err)
			// Run analyze to update count estimate
			_, err = sqlDB.ExecContext(ctx, "analyze")
			require.NoError(t, err)
		})
		// Only keep newR3 and newR1 during the refresh phase.
		filterFunc = func(_ context.Context, r *target.Alias) (bool, error) {
			return r.GetPublicId() == newR3.GetPublicId() ||
				r.GetPublicId() == newR1.GetPublicId(), nil
		}
		// Refresh again, should get newR3
		resp4, err := target.ListAliasesRefresh(ctx, []byte("some hash"), 1, filterFunc, resp3.ListToken, repo, relevantScopes)
		require.NoError(t, err)
		require.Equal(t, resp4.ListToken.GrantsHash, []byte("some hash"))
		require.False(t, resp4.CompleteListing)
		require.Equal(t, resp4.EstimatedItemCount, 9)
		require.Empty(t, resp4.DeletedIds)
		require.Len(t, resp4.Items, 1)
		require.Empty(t, cmp.Diff(resp4.Items[0], newR3, cmpIgnoreUnexportedOpts))
		// Refresh again, should get newR1
		resp5, err := target.ListAliasesRefreshPage(ctx, []byte("some hash"), 1, filterFunc, resp4.ListToken, repo, relevantScopes)
		require.NoError(t, err)
		require.Equal(t, resp5.ListToken.GrantsHash, []byte("some hash"))
		require.True(t, resp5.CompleteListing)
		require.Equal(t, resp5.EstimatedItemCount, 9)
		require.Empty(t, resp5.DeletedIds)
		require.Len(t, resp5.Items, 1)
		require.Empty(t, cmp.Diff(resp5.Items[0], newR1, cmpIgnoreUnexportedOpts))
	})
	// Deleted aliases shrink the estimated count and are reported in
	// DeletedIds on refresh. This subtest mutates allResources, so it
	// must run after the subtests above.
	t.Run("simple pagination with deletion", func(t *testing.T) {
		filterFunc := func(_ context.Context, r *target.Alias) (bool, error) {
			return true, nil
		}
		deletedAliasId := allResources[0].GetPublicId()
		_, err := repo.DeleteAlias(ctx, deletedAliasId)
		require.NoError(t, err)
		allResources = allResources[1:]
		// Run analyze to update count estimate
		_, err = sqlDB.ExecContext(ctx, "analyze")
		require.NoError(t, err)
		resp, err := target.ListAliases(ctx, []byte("some hash"), 1, filterFunc, repo, relevantScopes)
		require.NoError(t, err)
		require.NotNil(t, resp.ListToken)
		require.Equal(t, resp.ListToken.GrantsHash, []byte("some hash"))
		require.False(t, resp.CompleteListing)
		require.Equal(t, resp.EstimatedItemCount, 4)
		require.Empty(t, resp.DeletedIds)
		require.Len(t, resp.Items, 1)
		require.Empty(t, cmp.Diff(resp.Items[0], allResources[0], cmpIgnoreUnexportedOpts))
		// request remaining results
		resp2, err := target.ListAliasesPage(ctx, []byte("some hash"), 3, filterFunc, resp.ListToken, repo, relevantScopes)
		require.NoError(t, err)
		require.Equal(t, resp2.ListToken.GrantsHash, []byte("some hash"))
		require.True(t, resp2.CompleteListing)
		require.Equal(t, resp2.EstimatedItemCount, 4)
		require.Empty(t, resp2.DeletedIds)
		require.Len(t, resp2.Items, 3)
		require.Empty(t, cmp.Diff(resp2.Items, allResources[1:], cmpIgnoreUnexportedOpts))
		deletedAliasId = allResources[0].GetPublicId()
		_, err = repo.DeleteAlias(ctx, deletedAliasId)
		require.NoError(t, err)
		allResources = allResources[1:]
		// Run analyze to update count estimate
		_, err = sqlDB.ExecContext(ctx, "analyze")
		require.NoError(t, err)
		// request a refresh, nothing should be returned except the deleted id
		resp3, err := target.ListAliasesRefresh(ctx, []byte("some hash"), 1, filterFunc, resp2.ListToken, repo, relevantScopes)
		require.NoError(t, err)
		require.Equal(t, resp3.ListToken.GrantsHash, []byte("some hash"))
		require.True(t, resp3.CompleteListing)
		require.Equal(t, resp3.EstimatedItemCount, 3)
		require.Contains(t, resp3.DeletedIds, deletedAliasId)
		require.Empty(t, resp3.Items)
	})
}

@ -0,0 +1,70 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target
import (
"context"
"time"
"github.com/hashicorp/boundary/internal/errors"
"github.com/hashicorp/boundary/internal/listtoken"
"github.com/hashicorp/boundary/internal/pagination"
"github.com/hashicorp/boundary/internal/types/resource"
)
// ListAliasesPage continues the initial pagination phase started by
// ListAliases, returning up to pageSize aliases that pass filterItemFn.
// Additional database reads (in pageSize chunks) are issued as needed to
// fill the page. Paging resumes from the position recorded in tok, and the
// returned response carries a new list token for further pagination or
// refresh. Items are ordered by create time descending (newest first).
func ListAliasesPage(
	ctx context.Context,
	grantsHash []byte,
	pageSize int,
	filterItemFn pagination.ListFilterFunc[*Alias],
	tok *listtoken.Token,
	repo *Repository,
	withScopeIds []string,
) (*pagination.ListResponse[*Alias], error) {
	const op = "target.ListAliasesPage"
	// Guard clauses: reject each invalid argument with a specific message.
	if len(grantsHash) == 0 {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing grants hash")
	}
	if pageSize < 1 {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "page size must be at least 1")
	}
	if filterItemFn == nil {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing filter item callback")
	}
	if tok == nil {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing token")
	}
	if repo == nil {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing repo")
	}
	if len(withScopeIds) == 0 {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing scope ids")
	}
	if tok.ResourceType != resource.Alias {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "token did not have a alias resource type")
	}
	// This entry point only accepts tokens from the pagination phase.
	if _, ok := tok.Subtype.(*listtoken.PaginationToken); !ok {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "token did not have a pagination token component")
	}
	// pageFn fetches the next chunk of aliases, resuming either after the
	// last item of the previous chunk or, on the first call, after the last
	// item recorded in the token.
	pageFn := func(ctx context.Context, prevPageLast *Alias, limit int) ([]*Alias, time.Time, error) {
		queryOpts := []Option{
			WithLimit(limit),
		}
		switch {
		case prevPageLast != nil:
			queryOpts = append(queryOpts, WithStartPageAfterItem(prevPageLast))
		default:
			tokenLast, err := tok.LastItem(ctx)
			if err != nil {
				return nil, time.Time{}, err
			}
			queryOpts = append(queryOpts, WithStartPageAfterItem(tokenLast))
		}
		return repo.listAliases(ctx, withScopeIds, queryOpts...)
	}
	return pagination.ListPage(ctx, grantsHash, pageSize, filterItemFn, pageFn, repo.estimatedCount, tok)
}

@ -0,0 +1,76 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target
import (
"context"
"time"
"github.com/hashicorp/boundary/globals"
"github.com/hashicorp/boundary/internal/errors"
"github.com/hashicorp/boundary/internal/listtoken"
"github.com/hashicorp/boundary/internal/pagination"
"github.com/hashicorp/boundary/internal/types/resource"
)
// ListAliasesRefresh starts the refresh phase after a completed pagination
// phase, returning up to pageSize aliases that pass filterItemFn. Additional
// database reads (in pageSize chunks) are issued as needed to fill the page,
// starting from the position recorded in tok. The response carries a new
// list token for continuing the refresh. Items are ordered by update time
// descending (most recently updated first) and may repeat items already
// returned during the initial pagination phase. The response also reports
// the ids of any aliases deleted since the start of the initial pagination
// phase or the last response.
func ListAliasesRefresh(
	ctx context.Context,
	grantsHash []byte,
	pageSize int,
	filterItemFn pagination.ListFilterFunc[*Alias],
	tok *listtoken.Token,
	repo *Repository,
	withScopeIds []string,
) (*pagination.ListResponse[*Alias], error) {
	const op = "target.ListAliasesRefresh"
	// Guard clauses: reject each invalid argument with a specific message.
	if len(grantsHash) == 0 {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing grants hash")
	}
	if pageSize < 1 {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "page size must be at least 1")
	}
	if filterItemFn == nil {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing filter item callback")
	}
	if tok == nil {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing token")
	}
	if repo == nil {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing repo")
	}
	if len(withScopeIds) == 0 {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing scope ids")
	}
	if tok.ResourceType != resource.Alias {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "token did not have a alias resource type")
	}
	// This entry point only accepts tokens marking the start of a refresh.
	startRefresh, ok := tok.Subtype.(*listtoken.StartRefreshToken)
	if !ok {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "token did not have a start-refresh token component")
	}
	// pageFn fetches the next chunk of aliases updated since the previous
	// phase's upper bound, widened by the lookback window to account for
	// creations missed due to concurrent transactions in the initial
	// pagination phase.
	pageFn := func(ctx context.Context, prevPageLast *Alias, limit int) ([]*Alias, time.Time, error) {
		queryOpts := []Option{
			WithLimit(limit),
		}
		if prevPageLast != nil {
			queryOpts = append(queryOpts, WithStartPageAfterItem(prevPageLast))
		}
		updatedAfter := startRefresh.PreviousPhaseUpperBound.Add(-globals.RefreshReadLookbackDuration)
		return repo.listAliasesRefresh(ctx, updatedAfter, withScopeIds, queryOpts...)
	}
	// deletedIdsFn reports aliases deleted since the given time, widened by
	// the same lookback window for deletions missed by concurrent
	// transactions in previous requests.
	deletedIdsFn := func(ctx context.Context, since time.Time) ([]string, time.Time, error) {
		return repo.listDeletedIds(ctx, since.Add(-globals.RefreshReadLookbackDuration))
	}
	return pagination.ListRefresh(ctx, grantsHash, pageSize, filterItemFn, pageFn, repo.estimatedCount, deletedIdsFn, tok)
}

@ -0,0 +1,83 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target
import (
"context"
"time"
"github.com/hashicorp/boundary/globals"
"github.com/hashicorp/boundary/internal/errors"
"github.com/hashicorp/boundary/internal/listtoken"
"github.com/hashicorp/boundary/internal/pagination"
"github.com/hashicorp/boundary/internal/types/resource"
)
// ListAliasesRefreshPage lists up to page size aliases, filtering out entries that
// do not pass the filter item function. It will automatically request
// more aliases from the database, at page size chunks, to fill the page.
// It will start its paging based on the information in the token.
// It returns a new list token used to continue pagination or refresh items.
// Aliases are ordered by update time descending (most recently updated first).
// Aliases may contain items that were already returned during the initial
// pagination phase. It also returns a list of any aliases deleted since the
// last response.
// ListAliasesRefreshPage lists up to page size aliases, filtering out entries that
// do not pass the filter item function. It will automatically request
// more aliases from the database, at page size chunks, to fill the page.
// It will start its paging based on the information in the token.
// It returns a new list token used to continue pagination or refresh items.
// Aliases are ordered by update time descending (most recently updated first).
// Aliases may contain items that were already returned during the initial
// pagination phase. It also returns a list of any aliases deleted since the
// last response.
func ListAliasesRefreshPage(
	ctx context.Context,
	grantsHash []byte,
	pageSize int,
	filterItemFn pagination.ListFilterFunc[*Alias],
	tok *listtoken.Token,
	repo *Repository,
	withScopeIds []string,
) (*pagination.ListResponse[*Alias], error) {
	const op = "target.ListAliasesRefreshPage"
	// Validate all inputs up front so callers get a precise InvalidParameter
	// error instead of a failure deep inside the pagination machinery.
	switch {
	case len(grantsHash) == 0:
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing grants hash")
	case pageSize < 1:
		return nil, errors.New(ctx, errors.InvalidParameter, op, "page size must be at least 1")
	case filterItemFn == nil:
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing filter item callback")
	case tok == nil:
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing token")
	case repo == nil:
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing repo")
	case len(withScopeIds) == 0:
		return nil, errors.New(ctx, errors.InvalidParameter, op, "missing scope ids")
	case tok.ResourceType != resource.Alias:
		return nil, errors.New(ctx, errors.InvalidParameter, op, "token did not have an alias resource type")
	}
	// Refresh pagination requires the refresh-token subtype to know the phase
	// bounds and the last item returned.
	rt, ok := tok.Subtype.(*listtoken.RefreshToken)
	if !ok {
		return nil, errors.New(ctx, errors.InvalidParameter, op, "token did not have a refresh token component")
	}
	listItemsFn := func(ctx context.Context, lastPageItem *Alias, limit int) ([]*Alias, time.Time, error) {
		opts := []Option{
			WithLimit(limit),
		}
		// Continue from the last item on this page if we have one; otherwise
		// resume from the last item recorded in the token.
		if lastPageItem != nil {
			opts = append(opts, WithStartPageAfterItem(lastPageItem))
		} else {
			lastItem, err := tok.LastItem(ctx)
			if err != nil {
				return nil, time.Time{}, err
			}
			opts = append(opts, WithStartPageAfterItem(lastItem))
		}
		// Add the database read timeout to account for any creations missed due to concurrent
		// transactions in the original list pagination phase.
		return repo.listAliasesRefresh(ctx, rt.PhaseLowerBound.Add(-globals.RefreshReadLookbackDuration), withScopeIds, opts...)
	}
	listDeletedIdsFn := func(ctx context.Context, since time.Time) ([]string, time.Time, error) {
		// Add the database read timeout to account for any deletes missed due to concurrent
		// transactions in the original list pagination phase.
		return repo.listDeletedIds(ctx, since.Add(-globals.RefreshReadLookbackDuration))
	}
	return pagination.ListRefreshPage(ctx, grantsHash, pageSize, filterItemFn, listItemsFn, repo.estimatedCount, listDeletedIdsFn, tok)
}

@ -0,0 +1,280 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.31.0
// protoc (unknown)
// source: controller/storage/alias/target/store/v1/alias.proto
package store
import (
timestamp "github.com/hashicorp/boundary/internal/db/timestamp"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
type Alias struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// public_id is the public id used to access the alias via the API
	// @inject_tag: gorm:"primary_key"
	PublicId string `protobuf:"bytes,1,opt,name=public_id,json=publicId,proto3" json:"public_id,omitempty" gorm:"primary_key"`
	// The scope_id of the owning scope and must be set.
	// @inject_tag: `gorm:"not_null"`
	ScopeId string `protobuf:"bytes,2,opt,name=scope_id,json=scopeId,proto3" json:"scope_id,omitempty" gorm:"not_null"`
	// create_time from the RDBMS
	// @inject_tag: `gorm:"default:current_timestamp"`
	CreateTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty" gorm:"default:current_timestamp"`
	// update_time from the RDBMS
	// @inject_tag: `gorm:"default:current_timestamp"`
	UpdateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty" gorm:"default:current_timestamp"`
	// name is optional
	// @inject_tag: `gorm:"default:null"`
	Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty" gorm:"default:null"`
	// description is optional.
	// @inject_tag: `gorm:"default:null"`
	Description string `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty" gorm:"default:null"`
	// version allows optimistic locking of the resource
	// @inject_tag: `gorm:"default:null"`
	Version uint32 `protobuf:"varint,7,opt,name=version,proto3" json:"version,omitempty" gorm:"default:null"`
	// value is required and is the string used to reference this alias.
	// @inject_tag: `gorm:"not_null"`
	Value string `protobuf:"bytes,8,opt,name=value,proto3" json:"value,omitempty" gorm:"not_null"`
	// destination_id is optional and is the ID of the target this alias points
	// to.
	// @inject_tag: `gorm:"default:null"`
	DestinationId string `protobuf:"bytes,9,opt,name=destination_id,json=destinationId,proto3" json:"destination_id,omitempty" gorm:"default:null"`
	// host_id is optional and can only be set if destination_id is also set.
	// The value of this field is used when performing an authorize-session
	// action using this alias and specifies the host id to which the session
	// is authorized on the target if possible. The host id this points does not
	// need to be associated with the target yet nor even exist. If that is the
	// case then the authorize-session action will return an error when using
	// this alias.
	// @inject_tag: `gorm:"default:null"`
	HostId string `protobuf:"bytes,10,opt,name=host_id,json=hostId,proto3" json:"host_id,omitempty" gorm:"default:null"`
}
// NOTE(review): everything below is boilerplate emitted by protoc-gen-go (see
// the generated-code header at the top of this file). Regenerate it from
// alias.proto (e.g. via the Makefile protobuild target) rather than editing
// it by hand.
func (x *Alias) Reset() {
	*x = Alias{}
	if protoimpl.UnsafeEnabled {
		mi := &file_controller_storage_alias_target_store_v1_alias_proto_msgTypes[0]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Alias) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Alias) ProtoMessage() {}

func (x *Alias) ProtoReflect() protoreflect.Message {
	mi := &file_controller_storage_alias_target_store_v1_alias_proto_msgTypes[0]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Alias.ProtoReflect.Descriptor instead.
func (*Alias) Descriptor() ([]byte, []int) {
	return file_controller_storage_alias_target_store_v1_alias_proto_rawDescGZIP(), []int{0}
}

func (x *Alias) GetPublicId() string {
	if x != nil {
		return x.PublicId
	}
	return ""
}

func (x *Alias) GetScopeId() string {
	if x != nil {
		return x.ScopeId
	}
	return ""
}

func (x *Alias) GetCreateTime() *timestamp.Timestamp {
	if x != nil {
		return x.CreateTime
	}
	return nil
}

func (x *Alias) GetUpdateTime() *timestamp.Timestamp {
	if x != nil {
		return x.UpdateTime
	}
	return nil
}

func (x *Alias) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}

func (x *Alias) GetDescription() string {
	if x != nil {
		return x.Description
	}
	return ""
}

func (x *Alias) GetVersion() uint32 {
	if x != nil {
		return x.Version
	}
	return 0
}

func (x *Alias) GetValue() string {
	if x != nil {
		return x.Value
	}
	return ""
}

func (x *Alias) GetDestinationId() string {
	if x != nil {
		return x.DestinationId
	}
	return ""
}

func (x *Alias) GetHostId() string {
	if x != nil {
		return x.HostId
	}
	return ""
}

var File_controller_storage_alias_target_store_v1_alias_proto protoreflect.FileDescriptor

var file_controller_storage_alias_target_store_v1_alias_proto_rawDesc = []byte{
	0x0a, 0x34, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2f, 0x73, 0x74, 0x6f,
	0x72, 0x61, 0x67, 0x65, 0x2f, 0x61, 0x6c, 0x69, 0x61, 0x73, 0x2f, 0x74, 0x61, 0x72, 0x67, 0x65,
	0x74, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x76, 0x31, 0x2f, 0x61, 0x6c, 0x69, 0x61, 0x73,
	0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c,
	0x65, 0x72, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x61, 0x6c, 0x69, 0x61, 0x73,
	0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31,
	0x1a, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2f, 0x73, 0x74, 0x6f,
	0x72, 0x61, 0x67, 0x65, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2f, 0x76,
	0x31, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74,
	0x6f, 0x22, 0xff, 0x02, 0x0a, 0x05, 0x41, 0x6c, 0x69, 0x61, 0x73, 0x12, 0x1b, 0x0a, 0x09, 0x70,
	0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08,
	0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x49, 0x64, 0x12, 0x19, 0x0a, 0x08, 0x73, 0x63, 0x6f, 0x70,
	0x65, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x63, 0x6f, 0x70,
	0x65, 0x49, 0x64, 0x12, 0x4b, 0x0a, 0x0b, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x5f, 0x74, 0x69,
	0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72,
	0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x74, 0x69,
	0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73,
	0x74, 0x61, 0x6d, 0x70, 0x52, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x69, 0x6d, 0x65,
	0x12, 0x4b, 0x0a, 0x0b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18,
	0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c,
	0x65, 0x72, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x74, 0x69, 0x6d, 0x65, 0x73,
	0x74, 0x61, 0x6d, 0x70, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d,
	0x70, 0x52, 0x0a, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x12, 0x0a,
	0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d,
	0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e,
	0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74,
	0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x07,
	0x20, 0x01, 0x28, 0x0d, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a,
	0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61,
	0x6c, 0x75, 0x65, 0x12, 0x25, 0x0a, 0x0e, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69,
	0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x64, 0x65, 0x73,
	0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x17, 0x0a, 0x07, 0x68, 0x6f,
	0x73, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x68, 0x6f, 0x73,
	0x74, 0x49, 0x64, 0x42, 0x41, 0x5a, 0x3f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f,
	0x6d, 0x2f, 0x68, 0x61, 0x73, 0x68, 0x69, 0x63, 0x6f, 0x72, 0x70, 0x2f, 0x62, 0x6f, 0x75, 0x6e,
	0x64, 0x61, 0x72, 0x79, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x61, 0x6c,
	0x69, 0x61, 0x73, 0x2f, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x65,
	0x3b, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}

var (
	file_controller_storage_alias_target_store_v1_alias_proto_rawDescOnce sync.Once
	file_controller_storage_alias_target_store_v1_alias_proto_rawDescData = file_controller_storage_alias_target_store_v1_alias_proto_rawDesc
)

func file_controller_storage_alias_target_store_v1_alias_proto_rawDescGZIP() []byte {
	file_controller_storage_alias_target_store_v1_alias_proto_rawDescOnce.Do(func() {
		file_controller_storage_alias_target_store_v1_alias_proto_rawDescData = protoimpl.X.CompressGZIP(file_controller_storage_alias_target_store_v1_alias_proto_rawDescData)
	})
	return file_controller_storage_alias_target_store_v1_alias_proto_rawDescData
}

var file_controller_storage_alias_target_store_v1_alias_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
var file_controller_storage_alias_target_store_v1_alias_proto_goTypes = []interface{}{
	(*Alias)(nil),               // 0: controller.storage.alias.target.store.v1.Alias
	(*timestamp.Timestamp)(nil), // 1: controller.storage.timestamp.v1.Timestamp
}
var file_controller_storage_alias_target_store_v1_alias_proto_depIdxs = []int32{
	1, // 0: controller.storage.alias.target.store.v1.Alias.create_time:type_name -> controller.storage.timestamp.v1.Timestamp
	1, // 1: controller.storage.alias.target.store.v1.Alias.update_time:type_name -> controller.storage.timestamp.v1.Timestamp
	2, // [2:2] is the sub-list for method output_type
	2, // [2:2] is the sub-list for method input_type
	2, // [2:2] is the sub-list for extension type_name
	2, // [2:2] is the sub-list for extension extendee
	0, // [0:2] is the sub-list for field type_name
}

func init() { file_controller_storage_alias_target_store_v1_alias_proto_init() }
func file_controller_storage_alias_target_store_v1_alias_proto_init() {
	if File_controller_storage_alias_target_store_v1_alias_proto != nil {
		return
	}
	if !protoimpl.UnsafeEnabled {
		file_controller_storage_alias_target_store_v1_alias_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*Alias); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
	}
	type x struct{}
	out := protoimpl.TypeBuilder{
		File: protoimpl.DescBuilder{
			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
			RawDescriptor: file_controller_storage_alias_target_store_v1_alias_proto_rawDesc,
			NumEnums:      0,
			NumMessages:   1,
			NumExtensions: 0,
			NumServices:   0,
		},
		GoTypes:           file_controller_storage_alias_target_store_v1_alias_proto_goTypes,
		DependencyIndexes: file_controller_storage_alias_target_store_v1_alias_proto_depIdxs,
		MessageInfos:      file_controller_storage_alias_target_store_v1_alias_proto_msgTypes,
	}.Build()
	File_controller_storage_alias_target_store_v1_alias_proto = out.File
	file_controller_storage_alias_target_store_v1_alias_proto_rawDesc = nil
	file_controller_storage_alias_target_store_v1_alias_proto_goTypes = nil
	file_controller_storage_alias_target_store_v1_alias_proto_depIdxs = nil
}

@ -0,0 +1,24 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target
import (
"context"
"testing"
"github.com/hashicorp/boundary/internal/db"
"github.com/stretchr/testify/require"
)
// TestAlias creates an alias in the global scope with the given value and
// options, generates a public id for it, persists it via rw, and returns it.
// Any error fails the test immediately.
func TestAlias(t *testing.T, rw *db.Db, alias string, opt ...Option) *Alias {
	t.Helper()
	ctx := context.Background()
	a, err := NewAlias(ctx, "global", alias, opt...)
	require.NoError(t, err)
	a.PublicId, err = newAliasId(ctx)
	require.NoError(t, err)
	// Dropped the leftover db.WithDebug(true): debug output on every test
	// insert is noise and appears to have been a local-development remnant.
	require.NoError(t, rw.Create(ctx, a))
	return a
}

@ -0,0 +1,38 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package target_test
import (
"context"
"testing"
atar "github.com/hashicorp/boundary/internal/alias/target"
"github.com/hashicorp/boundary/internal/db"
"github.com/hashicorp/boundary/internal/iam"
"github.com/hashicorp/boundary/internal/target/tcp"
"github.com/stretchr/testify/assert"
)
// TestTestAlias verifies that the TestAlias helper honors every option it is
// given and fills in the generated fields (public id, global scope).
func TestTestAlias(t *testing.T) {
	ctx := context.Background()
	conn, _ := db.TestSetup(t, "postgres")
	rw := db.New(conn)
	wrapper := db.TestWrapper(t)
	repo := iam.TestRepo(t, conn, wrapper)
	_, proj := iam.TestScopes(t, repo)

	// A real target to point the alias at.
	tar := tcp.TestTarget(ctx, t, conn, proj.GetPublicId(), "test target")

	got := atar.TestAlias(t, rw, "example.alias",
		atar.WithDescription("description"),
		atar.WithName("name"),
		atar.WithDestinationId(tar.GetPublicId()),
		atar.WithHostId("hst_1234567890"))

	// Generated / implicit fields.
	assert.NotEmpty(t, got.GetPublicId())
	assert.Equal(t, "global", got.GetScopeId())
	// Fields supplied via arguments and options.
	assert.Equal(t, "example.alias", got.GetValue())
	assert.Equal(t, "description", got.GetDescription())
	assert.Equal(t, "name", got.GetName())
	assert.Equal(t, tar.GetPublicId(), got.GetDestinationId())
	assert.Equal(t, "hst_1234567890", got.GetHostId())
}

@ -0,0 +1,32 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package aliases
import (
"github.com/hashicorp/boundary/internal/types/action"
"github.com/hashicorp/boundary/internal/types/resource"
)
var (
	// IdActions contains the set of actions that can be performed on an
	// individual alias resource.
	IdActions = action.NewActionSet(
		action.NoOp,
		action.Read,
		action.Update,
		action.Delete,
	)

	// CollectionActions contains the set of actions that can be performed on
	// the aliases collection as a whole.
	CollectionActions = action.NewActionSet(
		action.Create,
		action.List,
	)
)
func init() {
	// Register the alias resource's per-id and collection action sets.
	// TODO: refactor to remove the IdActions and CollectionActions package
	// variables (the original note referenced a nonexistent idActionsMap).
	action.RegisterResource(resource.Alias, action.Union(IdActions), CollectionActions)
}

@ -14,6 +14,7 @@ import (
"github.com/hashicorp/boundary/internal/daemon/controller/common"
"github.com/hashicorp/boundary/internal/daemon/controller/common/scopeids"
"github.com/hashicorp/boundary/internal/daemon/controller/handlers"
"github.com/hashicorp/boundary/internal/daemon/controller/handlers/aliases"
"github.com/hashicorp/boundary/internal/daemon/controller/handlers/authmethods"
"github.com/hashicorp/boundary/internal/daemon/controller/handlers/authtokens"
"github.com/hashicorp/boundary/internal/daemon/controller/handlers/credentialstores"
@ -76,6 +77,7 @@ var (
// TODO: get this from action registry
scopeCollectionTypeMapMap = map[string]map[resource.Type]action.ActionSet{
scope.Global.String(): {
resource.Alias: aliases.CollectionActions,
resource.AuthMethod: authmethods.CollectionActions,
resource.StorageBucket: storage_buckets.CollectionActions,
resource.AuthToken: authtokens.CollectionActions,

@ -81,6 +81,12 @@ func createDefaultScopesRepoAndKms(t *testing.T) (*iam.Scope, *iam.Scope, func()
}
var globalAuthorizedCollectionActions = map[string]*structpb.ListValue{
"aliases": {
Values: []*structpb.Value{
structpb.NewStringValue("create"),
structpb.NewStringValue("list"),
},
},
"auth-methods": {
Values: []*structpb.Value{
structpb.NewStringValue("create"),

@ -63,7 +63,7 @@ func Test_newRateLimiterConfig(t *testing.T) {
ratelimit.DefaultLimiterMaxQuotas(),
false,
&rateLimiterConfig{
maxSize: 324162,
maxSize: 336168,
configs: nil,
disabled: false,
limits: defaultLimits,

@ -6,6 +6,164 @@
"data": {
"data": {
"limits": {
"alias": {
"create": [
{
"action": "create",
"limit": 30000,
"per": "ip-address",
"period": "30s",
"resource": "alias",
"unlimited": false
},
{
"action": "create",
"limit": 30000,
"per": "total",
"period": "30s",
"resource": "alias",
"unlimited": false
},
{
"action": "create",
"limit": 3000,
"per": "auth-token",
"period": "30s",
"resource": "alias",
"unlimited": false
}
],
"delete": [
{
"action": "delete",
"limit": 30000,
"per": "total",
"period": "30s",
"resource": "alias",
"unlimited": false
},
{
"action": "delete",
"limit": 30000,
"per": "ip-address",
"period": "30s",
"resource": "alias",
"unlimited": false
},
{
"action": "delete",
"limit": 3000,
"per": "auth-token",
"period": "30s",
"resource": "alias",
"unlimited": false
}
],
"list": [
{
"action": "list",
"limit": 150,
"per": "auth-token",
"period": "30s",
"resource": "alias",
"unlimited": false
},
{
"action": "list",
"limit": 1500,
"per": "total",
"period": "30s",
"resource": "alias",
"unlimited": false
},
{
"action": "list",
"limit": 1500,
"per": "ip-address",
"period": "30s",
"resource": "alias",
"unlimited": false
}
],
"no-op": [
{
"action": "no-op",
"limit": 3000,
"per": "auth-token",
"period": "30s",
"resource": "alias",
"unlimited": false
},
{
"action": "no-op",
"limit": 30000,
"per": "ip-address",
"period": "30s",
"resource": "alias",
"unlimited": false
},
{
"action": "no-op",
"limit": 30000,
"per": "total",
"period": "30s",
"resource": "alias",
"unlimited": false
}
],
"read": [
{
"action": "read",
"limit": 30000,
"per": "ip-address",
"period": "30s",
"resource": "alias",
"unlimited": false
},
{
"action": "read",
"limit": 3000,
"per": "auth-token",
"period": "30s",
"resource": "alias",
"unlimited": false
},
{
"action": "read",
"limit": 30000,
"per": "total",
"period": "30s",
"resource": "alias",
"unlimited": false
}
],
"update": [
{
"action": "update",
"limit": 3000,
"per": "auth-token",
"period": "30s",
"resource": "alias",
"unlimited": false
},
{
"action": "update",
"limit": 30000,
"per": "total",
"period": "30s",
"resource": "alias",
"unlimited": false
},
{
"action": "update",
"limit": 30000,
"per": "ip-address",
"period": "30s",
"resource": "alias",
"unlimited": false
}
]
},
"account": {
"change-password": [
{
@ -4075,7 +4233,7 @@
]
}
},
"max_size": 324162,
"max_size": 336168,
"msg": "controller api rate limiter"
},
"op": "controller.(rateLimiterConfig).writeSysEvent",

@ -4075,7 +4075,7 @@
]
}
},
"max_size": 324162,
"max_size": 336168,
"msg": "controller api rate limiter"
},
"op": "controller.(rateLimiterConfig).writeSysEvent",

@ -928,7 +928,7 @@ func Test_AnonRestrictions(t *testing.T) {
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
require, assert := require.New(t), assert.New(t)
for i := resource.Type(1); i <= resource.Policy; i++ {
for i := resource.Type(1); i <= resource.Alias; i++ {
if i == resource.Controller || i == resource.Worker {
continue
}

@ -119,7 +119,7 @@ func Test_ValidateType(t *testing.T) {
t.Parallel()
ctx := context.Background()
var g Grant
for i := resource.Unknown; i <= resource.Policy; i++ {
for i := resource.Unknown; i <= resource.Alias; i++ {
g.typ = i
if i == resource.Controller {
assert.Error(t, g.validateType(ctx))

@ -0,0 +1,59 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
syntax = "proto3";
package controller.storage.alias.target.store.v1;
import "controller/storage/timestamp/v1/timestamp.proto";
option go_package = "github.com/hashicorp/boundary/internal/alias/target/store;store";
message Alias {
  // public_id is the public id used to access the alias via the API
  // @inject_tag: gorm:"primary_key"
  string public_id = 1;
  // scope_id is the id of the owning scope and must be set.
  // @inject_tag: `gorm:"not_null"`
  string scope_id = 2;
  // create_time from the RDBMS
  // @inject_tag: `gorm:"default:current_timestamp"`
  timestamp.v1.Timestamp create_time = 3;
  // update_time from the RDBMS
  // @inject_tag: `gorm:"default:current_timestamp"`
  timestamp.v1.Timestamp update_time = 4;
  // name is optional
  // @inject_tag: `gorm:"default:null"`
  string name = 5;
  // description is optional.
  // @inject_tag: `gorm:"default:null"`
  string description = 6;
  // version allows optimistic locking of the resource
  // @inject_tag: `gorm:"default:null"`
  uint32 version = 7;
  // value is required and is the string used to reference this alias.
  // @inject_tag: `gorm:"not_null"`
  string value = 8;
  // destination_id is optional and is the ID of the target this alias points
  // to.
  // @inject_tag: `gorm:"default:null"`
  string destination_id = 9;
  // host_id is optional and can only be set if destination_id is also set.
  // The value of this field is used when performing an authorize-session
  // action using this alias and specifies the host id to which the session
  // is authorized on the target if possible. The host id this points does not
  // need to be associated with the target yet nor even exist. If that is the
  // case then the authorize-session action will return an error when using
  // this alias.
  // @inject_tag: `gorm:"default:null"`
  string host_id = 10;
}

@ -36,6 +36,7 @@ const (
StorageBucket
Policy
Billing
Alias
// NOTE: When adding a new type, be sure to update:
//
// * The Grant.validateType function and test
@ -76,6 +77,7 @@ func (r Type) String() string {
"storage-bucket",
"policy",
"billing",
"alias",
}[r]
}
@ -87,6 +89,8 @@ func (r Type) PluralString() string {
return "policies"
case Billing: // never pluralized
return "billing"
case Alias:
return "aliases"
default:
return r.String() + "s"
}
@ -100,6 +104,8 @@ func FromPlural(s string) (Type, bool) {
return Policy, true
case "billing":
return Billing, true
case "aliases":
return Alias, true
default:
t, ok := Map[strings.TrimSuffix(s, "s")]
return t, ok
@ -131,6 +137,7 @@ var Map = map[string]Type{
StorageBucket.String(): StorageBucket,
Policy.String(): Policy,
Billing.String(): Billing,
Alias.String(): Alias,
}
// Parent returns the parent type for a given type; if there is no parent, it
@ -174,6 +181,7 @@ func TopLevelType(typ Type) bool {
User,
StorageBucket,
Policy,
Alias,
Worker:
return true
}

@ -91,6 +91,11 @@ func Test_Resource(t *testing.T) {
want: Worker,
topLevelType: true,
},
{
typeString: "alias",
want: Alias,
topLevelType: true,
},
{
typeString: "session",
want: Session,

Loading…
Cancel
Save