mirror of https://github.com/hashicorp/boundary
feature (events): Classify auth method request/resp messages for audit events. (#1640)
* refactor (oidc): Stop emitting error for not found token id. Stop emitting errors when authtoken repo.IssueAuthToken is called and there's no pending token. It's a normal state and not an error condition. * feature (audit tagging): Add tags for auth method requests/responses, including testing functions for asserting audit events created when a service is called. pull/1644/head
parent
3de16b6e21
commit
a679300b50
@ -0,0 +1,80 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"io/ioutil"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/eventlogger/filters/encrypt"
|
||||
"github.com/hashicorp/eventlogger/formatter_filters/cloudevents"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// CloudEventFromFile will marshal a single cloud event from the provided file
|
||||
// name
|
||||
func CloudEventFromFile(t *testing.T, fileName string) *cloudevents.Event {
|
||||
t.Helper()
|
||||
b, err := ioutil.ReadFile(fileName)
|
||||
assert.NoError(t, err)
|
||||
got := &cloudevents.Event{}
|
||||
err = json.Unmarshal(b, got)
|
||||
require.NoErrorf(t, err, "json: %s", string(b))
|
||||
return got
|
||||
}
|
||||
|
||||
// GetEventDetails is a testing helper will return the details from the event
|
||||
// payload for a given messageType (request or response)
|
||||
func GetEventDetails(t *testing.T, e *cloudevents.Event, messageType string) map[string]interface{} {
|
||||
t.Helper()
|
||||
require := require.New(t)
|
||||
require.NotNil(e)
|
||||
require.NotEmpty(messageType)
|
||||
data, ok := e.Data.(map[string]interface{})
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
msgType, ok := data[messageType].(map[string]interface{})
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
details, ok := msgType["details"].(map[string]interface{})
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return details
|
||||
}
|
||||
|
||||
// AssertRedactedValues will assert that the values for the given keys within
|
||||
// the data have been redacted
|
||||
func AssertRedactedValues(t *testing.T, data interface{}, keys ...string) {
|
||||
t.Helper()
|
||||
assert, require := assert.New(t), require.New(t)
|
||||
require.NotNil(data)
|
||||
dataMap, ok := data.(map[string]interface{})
|
||||
require.Truef(ok, "data must be a map[string]interface{}")
|
||||
|
||||
rMap := make(map[string]bool, len(keys))
|
||||
for _, s := range keys {
|
||||
rMap[s] = true
|
||||
}
|
||||
for k, v := range dataMap {
|
||||
switch typ := v.(type) {
|
||||
case []interface{}:
|
||||
for _, s := range typ {
|
||||
if _, ok := rMap[k]; ok {
|
||||
assert.Equalf(encrypt.RedactedData, s, "expected %s to be redacted and it was set to: %s", k, v)
|
||||
} else {
|
||||
assert.NotEqualf(encrypt.RedactedData, s, "did not expect %s to be redacted", k)
|
||||
}
|
||||
}
|
||||
default:
|
||||
if _, ok := rMap[k]; ok {
|
||||
assert.Equalf(encrypt.RedactedData, v, "expected %s to be redacted and it was set to: %s", k, v)
|
||||
} else {
|
||||
assert.NotEqualf(encrypt.RedactedData, v, "did not expect %s to be redacted", k)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,23 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/eventlogger/filters/encrypt"
|
||||
wrapping "github.com/hashicorp/go-kms-wrapping"
|
||||
"google.golang.org/protobuf/types/known/fieldmaskpb"
|
||||
)
|
||||
|
||||
// NewEncryptFilter is a copy of event.NewEncryptFilter since importing it would
|
||||
// case circular deps. The primary reason for this test func is to make sure
|
||||
// the proper IgnoreTypes are included for testing.
|
||||
func NewEncryptFilter(t *testing.T, w wrapping.Wrapper) *encrypt.Filter {
|
||||
t.Helper()
|
||||
return &encrypt.Filter{
|
||||
Wrapper: w,
|
||||
IgnoreTypes: []reflect.Type{
|
||||
reflect.TypeOf(&fieldmaskpb.FieldMask{}),
|
||||
},
|
||||
}
|
||||
}
|
||||
Loading…
Reference in new issue