Merge branch 'main' into raft-wal

Josh Black
2024-01-08 14:49:51 -08:00
106 changed files with 2934 additions and 693 deletions

View File

@@ -510,7 +510,7 @@ jobs:
- run: |
rm -rf test-results/go-test/logs
ls -lhR test-results/go-test
find test-results/go-test -mindepth 1 -mtime +3 -delete
find test-results/go-test -mindepth 1 -type f -mtime +3 -delete
# Prune invalid timing files
find test-results/go-test -mindepth 1 -type f -name "*.json" -exec sh -c '

View File

@@ -279,6 +279,11 @@ type Backend interface {
// nodes for node and pipeline registration.
event.PipelineReader
// IsFallback can be used to determine if this audit backend device is intended to
// be used as a fallback to catch all events that are not written when only using
// filtered pipelines.
IsFallback() bool
// LogRequest is used to synchronously log a request. This is done after the
// request is authorized but before the request is executed. The arguments
// MUST not be modified in any way. They should be deep copied if this is

View File

@@ -16,6 +16,7 @@ import (
"sync/atomic"
"github.com/hashicorp/eventlogger"
"github.com/hashicorp/go-secure-stdlib/parseutil"
"github.com/hashicorp/vault/audit"
"github.com/hashicorp/vault/internal/observability/event"
"github.com/hashicorp/vault/sdk/helper/salt"
@@ -36,6 +37,7 @@ var _ audit.Backend = (*Backend)(nil)
// or reset the write cursor, this should be done in the future.
type Backend struct {
f *os.File
fallback bool
fileLock sync.RWMutex
formatter *audit.EntryFormatterWriter
formatConfig audit.FormatterConfig
@@ -60,6 +62,21 @@ func Factory(_ context.Context, conf *audit.BackendConfig, useEventLogger bool,
return nil, fmt.Errorf("%s: nil salt view", op)
}
// The config options 'fallback' and 'filter' are mutually exclusive; a fallback
// device catches everything, so it cannot be allowed to filter.
var fallback bool
var err error
if fallbackRaw, ok := conf.Config["fallback"]; ok {
fallback, err = parseutil.ParseBool(fallbackRaw)
if err != nil {
return nil, fmt.Errorf("%s: unable to parse 'fallback': %w", op, err)
}
}
if _, ok := conf.Config["filter"]; ok && fallback {
return nil, fmt.Errorf("%s: cannot configure a fallback device with a filter: %w", op, event.ErrInvalidParameter)
}
// Get file path from config or fall back to the old option name ('path') for compatibility
// (see commit bac4fe0799a372ba1245db642f3f6cd1f1d02669).
var filePath string
@@ -106,6 +123,7 @@ func Factory(_ context.Context, conf *audit.BackendConfig, useEventLogger bool,
}
b := &Backend{
fallback: fallback,
filePath: filePath,
formatConfig: cfg,
mode: mode,
@@ -550,3 +568,10 @@ func (b *Backend) EventType() eventlogger.EventType {
func (b *Backend) HasFiltering() bool {
return len(b.nodeIDList) > 0 && b.nodeMap[b.nodeIDList[0]].Type() == eventlogger.NodeTypeFilter
}
// IsFallback can be used to determine if this audit backend device is intended to
// be used as a fallback to catch all events that are not written when only using
// filtered pipelines.
func (b *Backend) IsFallback() bool {
return b.fallback
}
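
The new 'fallback' option reaches the Factory through conf.Config, which is populated from the options supplied when the audit device is enabled. As a minimal sketch (not part of this diff), enabling a file audit device as a fallback through the Go API client might look like the following; the mount path, file path, and option values are illustrative assumptions:

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/vault/api"
)

func main() {
	// Assumes VAULT_ADDR and VAULT_TOKEN are set in the environment.
	client, err := api.NewClient(api.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	// Enable a file audit device marked as a fallback. The backend Factory parses
	// 'fallback' with parseutil.ParseBool and rejects it when combined with 'filter'.
	err = client.Sys().EnableAuditWithOptions("file-fallback", &api.EnableAuditOptions{
		Type: "file",
		Options: map[string]string{
			"file_path": "/var/log/vault_audit.log", // illustrative path
			"fallback":  "true",
		},
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("fallback audit device enabled")
}
```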

View File

@@ -576,3 +576,129 @@ func TestBackend_configureFilterFormatterSink(t *testing.T) {
node = b.nodeMap[id]
require.Equal(t, eventlogger.NodeTypeSink, node.Type())
}
// TestBackend_Factory_Conf is used to ensure that any configuration which is
// supplied is validated and tested.
func TestBackend_Factory_Conf(t *testing.T) {
t.Parallel()
ctx := context.Background()
tests := map[string]struct {
backendConfig *audit.BackendConfig
isErrorExpected bool
expectedErrorMessage string
}{
"nil-salt-config": {
backendConfig: &audit.BackendConfig{
SaltConfig: nil,
},
isErrorExpected: true,
expectedErrorMessage: "file.Factory: nil salt config",
},
"nil-salt-view": {
backendConfig: &audit.BackendConfig{
SaltConfig: &salt.Config{},
},
isErrorExpected: true,
expectedErrorMessage: "file.Factory: nil salt view",
},
"fallback-device-with-filter": {
backendConfig: &audit.BackendConfig{
MountPath: "discard",
SaltConfig: &salt.Config{},
SaltView: &logical.InmemStorage{},
Config: map[string]string{
"fallback": "true",
"file_path": discard,
"filter": "mount_type == kv",
},
},
isErrorExpected: true,
expectedErrorMessage: "file.Factory: cannot configure a fallback device with a filter: invalid parameter",
},
"non-fallback-device-with-filter": {
backendConfig: &audit.BackendConfig{
MountPath: "discard",
SaltConfig: &salt.Config{},
SaltView: &logical.InmemStorage{},
Config: map[string]string{
"fallback": "false",
"file_path": discard,
"filter": "mount_type == kv",
},
},
isErrorExpected: false,
},
}
for name, tc := range tests {
name := name
tc := tc
t.Run(name, func(t *testing.T) {
t.Parallel()
be, err := Factory(ctx, tc.backendConfig, true, nil)
switch {
case tc.isErrorExpected:
require.Error(t, err)
require.EqualError(t, err, tc.expectedErrorMessage)
default:
require.NoError(t, err)
require.NotNil(t, be)
}
})
}
}
// TestBackend_IsFallback ensures that the 'fallback' config setting is parsed
// and set correctly, then exposed via the interface method IsFallback().
func TestBackend_IsFallback(t *testing.T) {
t.Parallel()
ctx := context.Background()
tests := map[string]struct {
backendConfig *audit.BackendConfig
isFallbackExpected bool
}{
"fallback": {
backendConfig: &audit.BackendConfig{
MountPath: "discard",
SaltConfig: &salt.Config{},
SaltView: &logical.InmemStorage{},
Config: map[string]string{
"fallback": "true",
"file_path": discard,
},
},
isFallbackExpected: true,
},
"no-fallback": {
backendConfig: &audit.BackendConfig{
MountPath: "discard",
SaltConfig: &salt.Config{},
SaltView: &logical.InmemStorage{},
Config: map[string]string{
"fallback": "false",
"file_path": discard,
},
},
isFallbackExpected: false,
},
}
for name, tc := range tests {
name := name
tc := tc
t.Run(name, func(t *testing.T) {
t.Parallel()
be, err := Factory(ctx, tc.backendConfig, true, nil)
require.NoError(t, err)
require.NotNil(t, be)
require.Equal(t, tc.isFallbackExpected, be.IsFallback())
})
}
}

View File

@@ -29,6 +29,7 @@ type Backend struct {
sync.Mutex
address string
connection net.Conn
fallback bool
formatter *audit.EntryFormatterWriter
formatConfig audit.FormatterConfig
name string
@@ -73,12 +74,27 @@ func Factory(_ context.Context, conf *audit.BackendConfig, useEventLogger bool,
return nil, fmt.Errorf("%s: failed to parse 'write_timeout': %w", op, err)
}
// The config options 'fallback' and 'filter' are mutually exclusive; a fallback
// device catches everything, so it cannot be allowed to filter.
var fallback bool
if fallbackRaw, ok := conf.Config["fallback"]; ok {
fallback, err = parseutil.ParseBool(fallbackRaw)
if err != nil {
return nil, fmt.Errorf("%s: unable to parse 'fallback': %w", op, err)
}
}
if _, ok := conf.Config["filter"]; ok && fallback {
return nil, fmt.Errorf("%s: cannot configure a fallback device with a filter: %w", op, event.ErrInvalidParameter)
}
cfg, err := formatterConfig(conf.Config)
if err != nil {
return nil, fmt.Errorf("%s: failed to create formatter config: %w", op, err)
}
b := &Backend{
fallback: fallback,
address: address,
formatConfig: cfg,
name: conf.MountPath,
@@ -443,3 +459,10 @@ func (b *Backend) EventType() eventlogger.EventType {
func (b *Backend) HasFiltering() bool {
return len(b.nodeIDList) > 0 && b.nodeMap[b.nodeIDList[0]].Type() == eventlogger.NodeTypeFilter
}
// IsFallback can be used to determine if this audit backend device is intended to
// be used as a fallback to catch all events that are not written when only using
// filtered pipelines.
func (b *Backend) IsFallback() bool {
return b.fallback
}

View File

@@ -4,10 +4,13 @@
package socket
import (
"context"
"testing"
"github.com/hashicorp/eventlogger"
"github.com/hashicorp/vault/audit"
"github.com/hashicorp/vault/sdk/helper/salt"
"github.com/hashicorp/vault/sdk/logical"
"github.com/stretchr/testify/require"
)
@@ -329,3 +332,192 @@ func TestBackend_configureFilterFormatterSink(t *testing.T) {
node = b.nodeMap[id]
require.Equal(t, eventlogger.NodeTypeSink, node.Type())
}
// TestBackend_Factory_Conf is used to ensure that any configuration which is
// supplied is validated and tested.
func TestBackend_Factory_Conf(t *testing.T) {
t.Parallel()
ctx := context.Background()
tests := map[string]struct {
backendConfig *audit.BackendConfig
isErrorExpected bool
expectedErrorMessage string
}{
"nil-salt-config": {
backendConfig: &audit.BackendConfig{
SaltConfig: nil,
},
isErrorExpected: true,
expectedErrorMessage: "socket.Factory: nil salt config",
},
"nil-salt-view": {
backendConfig: &audit.BackendConfig{
SaltConfig: &salt.Config{},
},
isErrorExpected: true,
expectedErrorMessage: "socket.Factory: nil salt view",
},
"no-address": {
backendConfig: &audit.BackendConfig{
MountPath: "discard",
SaltConfig: &salt.Config{},
SaltView: &logical.InmemStorage{},
Config: map[string]string{},
},
isErrorExpected: true,
expectedErrorMessage: "socket.Factory: address is required",
},
"empty-address": {
backendConfig: &audit.BackendConfig{
MountPath: "discard",
SaltConfig: &salt.Config{},
SaltView: &logical.InmemStorage{},
Config: map[string]string{
"address": "",
},
},
isErrorExpected: true,
expectedErrorMessage: "socket.Factory: error configuring sink node: socket.(Backend).configureSinkNode: address is required: invalid parameter",
},
"whitespace-address": {
backendConfig: &audit.BackendConfig{
MountPath: "discard",
SaltConfig: &salt.Config{},
SaltView: &logical.InmemStorage{},
Config: map[string]string{
"address": " ",
},
},
isErrorExpected: true,
expectedErrorMessage: "socket.Factory: error configuring sink node: socket.(Backend).configureSinkNode: address is required: invalid parameter",
},
"write-duration-valid": {
backendConfig: &audit.BackendConfig{
MountPath: "discard",
SaltConfig: &salt.Config{},
SaltView: &logical.InmemStorage{},
Config: map[string]string{
"address": "hashicorp.com",
"write_timeout": "5s",
},
},
isErrorExpected: false,
},
"write-duration-not-valid": {
backendConfig: &audit.BackendConfig{
MountPath: "discard",
SaltConfig: &salt.Config{},
SaltView: &logical.InmemStorage{},
Config: map[string]string{
"address": "hashicorp.com",
"write_timeout": "qwerty",
},
},
isErrorExpected: true,
expectedErrorMessage: "socket.Factory: failed to parse 'write_timeout': time: invalid duration \"qwerty\"",
},
"non-fallback-device-with-filter": {
backendConfig: &audit.BackendConfig{
MountPath: "discard",
SaltConfig: &salt.Config{},
SaltView: &logical.InmemStorage{},
Config: map[string]string{
"address": "hashicorp.com",
"write_timeout": "5s",
"fallback": "false",
"filter": "mount_type == kv",
},
},
isErrorExpected: false,
},
"fallback-device-with-filter": {
backendConfig: &audit.BackendConfig{
MountPath: "discard",
SaltConfig: &salt.Config{},
SaltView: &logical.InmemStorage{},
Config: map[string]string{
"address": "hashicorp.com",
"write_timeout": "2s",
"fallback": "true",
"filter": "mount_type == kv",
},
},
isErrorExpected: true,
expectedErrorMessage: "socket.Factory: cannot configure a fallback device with a filter: invalid parameter",
},
}
for name, tc := range tests {
name := name
tc := tc
t.Run(name, func(t *testing.T) {
t.Parallel()
be, err := Factory(ctx, tc.backendConfig, true, nil)
switch {
case tc.isErrorExpected:
require.Error(t, err)
require.EqualError(t, err, tc.expectedErrorMessage)
default:
require.NoError(t, err)
require.NotNil(t, be)
}
})
}
}
// TestBackend_IsFallback ensures that the 'fallback' config setting is parsed
// and set correctly, then exposed via the interface method IsFallback().
func TestBackend_IsFallback(t *testing.T) {
t.Parallel()
ctx := context.Background()
tests := map[string]struct {
backendConfig *audit.BackendConfig
isFallbackExpected bool
}{
"fallback": {
backendConfig: &audit.BackendConfig{
MountPath: "qwerty",
SaltConfig: &salt.Config{},
SaltView: &logical.InmemStorage{},
Config: map[string]string{
"fallback": "true",
"address": "hashicorp.com",
"write_timeout": "5s",
},
},
isFallbackExpected: true,
},
"no-fallback": {
backendConfig: &audit.BackendConfig{
MountPath: "qwerty",
SaltConfig: &salt.Config{},
SaltView: &logical.InmemStorage{},
Config: map[string]string{
"fallback": "false",
"address": "hashicorp.com",
"write_timeout": "5s",
},
},
isFallbackExpected: false,
},
}
for name, tc := range tests {
name := name
tc := tc
t.Run(name, func(t *testing.T) {
t.Parallel()
be, err := Factory(ctx, tc.backendConfig, true, nil)
require.NoError(t, err)
require.NotNil(t, be)
require.Equal(t, tc.isFallbackExpected, be.IsFallback())
})
}
}

View File

@@ -12,6 +12,7 @@ import (
"sync"
"github.com/hashicorp/eventlogger"
"github.com/hashicorp/go-secure-stdlib/parseutil"
gsyslog "github.com/hashicorp/go-syslog"
"github.com/hashicorp/vault/audit"
"github.com/hashicorp/vault/internal/observability/event"
@@ -23,6 +24,7 @@ var _ audit.Backend = (*Backend)(nil)
// Backend is the audit backend for the syslog-based audit store.
type Backend struct {
fallback bool
formatter *audit.EntryFormatterWriter
formatConfig audit.FormatterConfig
logger gsyslog.Syslogger
@@ -58,6 +60,21 @@ func Factory(_ context.Context, conf *audit.BackendConfig, useEventLogger bool,
tag = "vault"
}
// The config options 'fallback' and 'filter' are mutually exclusive; a fallback
// device catches everything, so it cannot be allowed to filter.
var fallback bool
var err error
if fallbackRaw, ok := conf.Config["fallback"]; ok {
fallback, err = parseutil.ParseBool(fallbackRaw)
if err != nil {
return nil, fmt.Errorf("%s: unable to parse 'fallback': %w", op, err)
}
}
if _, ok := conf.Config["filter"]; ok && fallback {
return nil, fmt.Errorf("%s: cannot configure a fallback device with a filter: %w", op, event.ErrInvalidParameter)
}
cfg, err := formatterConfig(conf.Config)
if err != nil {
return nil, fmt.Errorf("%s: failed to create formatter config: %w", op, err)
@@ -70,6 +87,7 @@ func Factory(_ context.Context, conf *audit.BackendConfig, useEventLogger bool,
}
b := &Backend{
fallback: fallback,
formatConfig: cfg,
logger: logger,
name: conf.MountPath,
@@ -347,3 +365,10 @@ func (b *Backend) EventType() eventlogger.EventType {
func (b *Backend) HasFiltering() bool {
return len(b.nodeIDList) > 0 && b.nodeMap[b.nodeIDList[0]].Type() == eventlogger.NodeTypeFilter
}
// IsFallback can be used to determine if this audit backend device is intended to
// be used as a fallback to catch all events that are not written when only using
// filtered pipelines.
func (b *Backend) IsFallback() bool {
return b.fallback
}

View File

@@ -4,10 +4,13 @@
package syslog
import (
"context"
"testing"
"github.com/hashicorp/eventlogger"
"github.com/hashicorp/vault/audit"
"github.com/hashicorp/vault/sdk/helper/salt"
"github.com/hashicorp/vault/sdk/logical"
"github.com/stretchr/testify/require"
)
@@ -311,3 +314,125 @@ func TestBackend_configureFilterFormatterSink(t *testing.T) {
node = b.nodeMap[id]
require.Equal(t, eventlogger.NodeTypeSink, node.Type())
}
// TestBackend_Factory_Conf is used to ensure that any configuration which is
// supplied is validated and tested.
func TestBackend_Factory_Conf(t *testing.T) {
t.Parallel()
ctx := context.Background()
tests := map[string]struct {
backendConfig *audit.BackendConfig
isErrorExpected bool
expectedErrorMessage string
}{
"nil-salt-config": {
backendConfig: &audit.BackendConfig{
SaltConfig: nil,
},
isErrorExpected: true,
expectedErrorMessage: "syslog.Factory: nil salt config",
},
"nil-salt-view": {
backendConfig: &audit.BackendConfig{
SaltConfig: &salt.Config{},
},
isErrorExpected: true,
expectedErrorMessage: "syslog.Factory: nil salt view",
},
"non-fallback-device-with-filter": {
backendConfig: &audit.BackendConfig{
MountPath: "discard",
SaltConfig: &salt.Config{},
SaltView: &logical.InmemStorage{},
Config: map[string]string{
"fallback": "false",
"filter": "mount_type == kv",
},
},
isErrorExpected: false,
},
"fallback-device-with-filter": {
backendConfig: &audit.BackendConfig{
MountPath: "discard",
SaltConfig: &salt.Config{},
SaltView: &logical.InmemStorage{},
Config: map[string]string{
"fallback": "true",
"filter": "mount_type == kv",
},
},
isErrorExpected: true,
expectedErrorMessage: "syslog.Factory: cannot configure a fallback device with a filter: invalid parameter",
},
}
for name, tc := range tests {
name := name
tc := tc
t.Run(name, func(t *testing.T) {
t.Parallel()
be, err := Factory(ctx, tc.backendConfig, true, nil)
switch {
case tc.isErrorExpected:
require.Error(t, err)
require.EqualError(t, err, tc.expectedErrorMessage)
default:
require.NoError(t, err)
require.NotNil(t, be)
}
})
}
}
// TestBackend_IsFallback ensures that the 'fallback' config setting is parsed
// and set correctly, then exposed via the interface method IsFallback().
func TestBackend_IsFallback(t *testing.T) {
t.Parallel()
ctx := context.Background()
tests := map[string]struct {
backendConfig *audit.BackendConfig
isFallbackExpected bool
}{
"fallback": {
backendConfig: &audit.BackendConfig{
MountPath: "qwerty",
SaltConfig: &salt.Config{},
SaltView: &logical.InmemStorage{},
Config: map[string]string{
"fallback": "true",
},
},
isFallbackExpected: true,
},
"no-fallback": {
backendConfig: &audit.BackendConfig{
MountPath: "qwerty",
SaltConfig: &salt.Config{},
SaltView: &logical.InmemStorage{},
Config: map[string]string{
"fallback": "false",
},
},
isFallbackExpected: false,
},
}
for name, tc := range tests {
name := name
tc := tc
t.Run(name, func(t *testing.T) {
t.Parallel()
be, err := Factory(ctx, tc.backendConfig, true, nil)
require.NoError(t, err)
require.NotNil(t, be)
require.Equal(t, tc.isFallbackExpected, be.IsFallback())
})
}
}

View File

@@ -6,12 +6,15 @@ package database
import (
"context"
"database/sql"
"encoding/json"
"errors"
"fmt"
"log"
"net/url"
"os"
"reflect"
"strings"
"sync"
"testing"
"time"
@@ -35,12 +38,26 @@ import (
"github.com/mitchellh/mapstructure"
)
func getClusterPostgresDBWithFactory(t *testing.T, factory logical.Factory) (*vault.TestCluster, logical.SystemView) {
t.Helper()
cluster, sys := getClusterWithFactory(t, factory)
vault.TestAddTestPlugin(t, cluster.Cores[0].Core, "postgresql-database-plugin", consts.PluginTypeDatabase, "", "TestBackend_PluginMain_PostgresMultiplexed",
[]string{fmt.Sprintf("%s=%s", pluginutil.PluginCACertPEMEnv, cluster.CACertPEMFile)})
return cluster, sys
}
func getClusterPostgresDB(t *testing.T) (*vault.TestCluster, logical.SystemView) {
t.Helper()
cluster, sys := getClusterPostgresDBWithFactory(t, Factory)
return cluster, sys
}
func getClusterWithFactory(t *testing.T, factory logical.Factory) (*vault.TestCluster, logical.SystemView) {
t.Helper()
pluginDir := corehelpers.MakeTestPluginDir(t)
coreConfig := &vault.CoreConfig{
LogicalBackends: map[string]logical.Factory{
"database": Factory,
"database": factory,
},
BuiltinRegistry: builtinplugins.Registry,
PluginDirectory: pluginDir,
@@ -53,36 +70,14 @@ func getClusterPostgresDB(t *testing.T) (*vault.TestCluster, logical.SystemView)
cores := cluster.Cores
vault.TestWaitActive(t, cores[0].Core)
os.Setenv(pluginutil.PluginCACertPEMEnv, cluster.CACertPEMFile)
sys := vault.TestDynamicSystemView(cores[0].Core, nil)
vault.TestAddTestPlugin(t, cores[0].Core, "postgresql-database-plugin", consts.PluginTypeDatabase, "", "TestBackend_PluginMain_PostgresMultiplexed", []string{})
return cluster, sys
}
func getCluster(t *testing.T) (*vault.TestCluster, logical.SystemView) {
t.Helper()
pluginDir := corehelpers.MakeTestPluginDir(t)
coreConfig := &vault.CoreConfig{
LogicalBackends: map[string]logical.Factory{
"database": Factory,
},
BuiltinRegistry: builtinplugins.Registry,
PluginDirectory: pluginDir,
}
cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{
HandlerFunc: vaulthttp.Handler,
})
cluster.Start()
cores := cluster.Cores
vault.TestWaitActive(t, cores[0].Core)
os.Setenv(pluginutil.PluginCACertPEMEnv, cluster.CACertPEMFile)
sys := vault.TestDynamicSystemView(cores[0].Core, nil)
cluster, sys := getClusterWithFactory(t, Factory)
return cluster, sys
}
@@ -515,7 +510,7 @@ func TestBackend_basic(t *testing.T) {
if credsResp.Secret.TTL != 5*time.Minute {
t.Fatalf("unexpected TTL of %d", credsResp.Secret.TTL)
}
if !testCredsExist(t, credsResp, connURL) {
if !testCredsExist(t, credsResp.Data, connURL) {
t.Fatalf("Creds should exist")
}
@@ -535,7 +530,7 @@ func TestBackend_basic(t *testing.T) {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
if testCredsExist(t, credsResp, connURL) {
if testCredsExist(t, credsResp.Data, connURL) {
t.Fatalf("Creds should not exist")
}
}
@@ -553,7 +548,7 @@ func TestBackend_basic(t *testing.T) {
if err != nil || (credsResp != nil && credsResp.IsError()) {
t.Fatalf("err:%s resp:%#v\n", err, credsResp)
}
if !testCredsExist(t, credsResp, connURL) {
if !testCredsExist(t, credsResp.Data, connURL) {
t.Fatalf("Creds should exist")
}
@@ -586,108 +581,118 @@ func TestBackend_basic(t *testing.T) {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
if testCredsExist(t, credsResp, connURL) {
if testCredsExist(t, credsResp.Data, connURL) {
t.Fatalf("Creds should not exist")
}
}
}
func TestBackend_connectionCrud(t *testing.T) {
cluster, sys := getClusterPostgresDB(t)
defer cluster.Cleanup()
// singletonDBFactory allows us to reach into the internals of a databaseBackend
// even when it's been created by a call to the sys mount. The factory method
// satisfies the logical.Factory type, and lazily creates the databaseBackend
// once the SystemView has been provided because the factory method itself is an
// input for creating the test cluster and its system view.
type singletonDBFactory struct {
once sync.Once
db *databaseBackend
sys logical.SystemView
}
// factory satisfies the logical.Factory type.
func (s *singletonDBFactory) factory(context.Context, *logical.BackendConfig) (logical.Backend, error) {
if s.sys == nil {
return nil, errors.New("sys is nil")
}
config := logical.TestBackendConfig()
config.StorageView = &logical.InmemStorage{}
config.System = sys
config.System = s.sys
b, err := Factory(context.Background(), config)
var err error
s.once.Do(func() {
var b logical.Backend
b, err = Factory(context.Background(), config)
s.db = b.(*databaseBackend)
})
if err != nil {
t.Fatal(err)
return nil, err
}
defer b.Cleanup(context.Background())
if s.db == nil {
return nil, errors.New("db is nil")
}
return s.db, nil
}
func TestBackend_connectionCrud(t *testing.T) {
dbFactory := &singletonDBFactory{}
cluster, sys := getClusterPostgresDBWithFactory(t, dbFactory.factory)
defer cluster.Cleanup()
dbFactory.sys = sys
client := cluster.Cores[0].Client.Logical()
cleanup, connURL := postgreshelper.PrepareTestContainer(t, "13.4-buster")
defer cleanup()
// Mount the database plugin.
resp, err := client.Write("sys/mounts/database", map[string]interface{}{
"type": "database",
})
if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
// Configure a connection
data := map[string]interface{}{
resp, err = client.Write("database/config/plugin-test", map[string]interface{}{
"connection_url": "test",
"plugin_name": "postgresql-database-plugin",
"verify_connection": false,
}
req := &logical.Request{
Operation: logical.UpdateOperation,
Path: "config/plugin-test",
Storage: config.StorageView,
Data: data,
}
resp, err := b.HandleRequest(namespace.RootContext(nil), req)
if err != nil || (resp != nil && resp.IsError()) {
})
if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
// Configure a second connection to confirm below it doesn't get restarted.
data = map[string]interface{}{
resp, err = client.Write("database/config/plugin-test-hana", map[string]interface{}{
"connection_url": "test",
"plugin_name": "hana-database-plugin",
"verify_connection": false,
}
req = &logical.Request{
Operation: logical.UpdateOperation,
Path: "config/plugin-test-hana",
Storage: config.StorageView,
Data: data,
}
resp, err = b.HandleRequest(namespace.RootContext(nil), req)
if err != nil || (resp != nil && resp.IsError()) {
})
if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
// Create a role
data = map[string]interface{}{
resp, err = client.Write("database/roles/plugin-role-test", map[string]interface{}{
"db_name": "plugin-test",
"creation_statements": testRole,
"revocation_statements": defaultRevocationSQL,
"default_ttl": "5m",
"max_ttl": "10m",
}
req = &logical.Request{
Operation: logical.UpdateOperation,
Path: "roles/plugin-role-test",
Storage: config.StorageView,
Data: data,
}
resp, err = b.HandleRequest(namespace.RootContext(nil), req)
if err != nil || (resp != nil && resp.IsError()) {
})
if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
// Update the connection
data = map[string]interface{}{
resp, err = client.Write("database/config/plugin-test", map[string]interface{}{
"connection_url": connURL,
"plugin_name": "postgresql-database-plugin",
"allowed_roles": []string{"plugin-role-test"},
"username": "postgres",
"password": "secret",
"private_key": "PRIVATE_KEY",
}
req = &logical.Request{
Operation: logical.UpdateOperation,
Path: "config/plugin-test",
Storage: config.StorageView,
Data: data,
}
resp, err = b.HandleRequest(namespace.RootContext(nil), req)
if err != nil || (resp != nil && resp.IsError()) {
})
if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
if len(resp.Warnings) == 0 {
t.Fatalf("expected warning about password in url %s, resp:%#v\n", connURL, resp)
}
req.Operation = logical.ReadOperation
resp, err = b.HandleRequest(namespace.RootContext(nil), req)
if err != nil || (resp != nil && resp.IsError()) {
resp, err = client.Read("database/config/plugin-test")
if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
returnedConnectionDetails := resp.Data["connection_details"].(map[string]interface{})
@@ -703,11 +708,16 @@ func TestBackend_connectionCrud(t *testing.T) {
}
// Replace connection url with templated version
req.Operation = logical.UpdateOperation
connURL = strings.ReplaceAll(connURL, "postgres:secret", "{{username}}:{{password}}")
data["connection_url"] = connURL
resp, err = b.HandleRequest(namespace.RootContext(nil), req)
if err != nil || (resp != nil && resp.IsError()) {
templatedConnURL := strings.ReplaceAll(connURL, "postgres:secret", "{{username}}:{{password}}")
resp, err = client.Write("database/config/plugin-test", map[string]interface{}{
"connection_url": templatedConnURL,
"plugin_name": "postgresql-database-plugin",
"allowed_roles": []string{"plugin-role-test"},
"username": "postgres",
"password": "secret",
"private_key": "PRIVATE_KEY",
})
if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
@@ -716,36 +726,38 @@ func TestBackend_connectionCrud(t *testing.T) {
"plugin_name": "postgresql-database-plugin",
"connection_details": map[string]interface{}{
"username": "postgres",
"connection_url": connURL,
"connection_url": templatedConnURL,
},
"allowed_roles": []string{"plugin-role-test"},
"root_credentials_rotate_statements": []string(nil),
"allowed_roles": []any{"plugin-role-test"},
"root_credentials_rotate_statements": []any{},
"password_policy": "",
"plugin_version": "",
}
req.Operation = logical.ReadOperation
resp, err = b.HandleRequest(namespace.RootContext(nil), req)
if err != nil || (resp != nil && resp.IsError()) {
resp, err = client.Read("database/config/plugin-test")
if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
delete(resp.Data["connection_details"].(map[string]interface{}), "name")
if diff := deep.Equal(resp.Data, expected); diff != nil {
t.Fatal(diff)
t.Fatal(strings.Join(diff, "\n"))
}
// Test endpoints for reloading plugins.
for _, reloadPath := range []string{
"reset/plugin-test",
"reload/postgresql-database-plugin",
for _, reload := range []struct {
path string
data map[string]any
checkCount bool
}{
{"database/reset/plugin-test", nil, false},
{"database/reload/postgresql-database-plugin", nil, true},
{"sys/plugins/reload/backend", map[string]any{
"plugin": "postgresql-database-plugin",
}, false},
} {
getConnectionID := func(name string) string {
t.Helper()
dbBackend, ok := b.(*databaseBackend)
if !ok {
t.Fatal("could not convert logical.Backend to databaseBackend")
}
dbi := dbBackend.connections.Get(name)
dbi := dbFactory.db.connections.Get(name)
if dbi == nil {
t.Fatal("no plugin-test dbi")
}
@@ -753,14 +765,8 @@ func TestBackend_connectionCrud(t *testing.T) {
}
initialID := getConnectionID("plugin-test")
hanaID := getConnectionID("plugin-test-hana")
req = &logical.Request{
Operation: logical.UpdateOperation,
Path: reloadPath,
Storage: config.StorageView,
Data: map[string]interface{}{},
}
resp, err = b.HandleRequest(namespace.RootContext(nil), req)
if err != nil || (resp != nil && resp.IsError()) {
resp, err = client.Write(reload.path, reload.data)
if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
if initialID == getConnectionID("plugin-test") {
@@ -769,54 +775,43 @@ func TestBackend_connectionCrud(t *testing.T) {
if hanaID != getConnectionID("plugin-test-hana") {
t.Fatal("hana plugin got restarted but shouldn't have been")
}
if strings.HasPrefix(reloadPath, "reload/") {
if expected := 1; expected != resp.Data["count"] {
t.Fatalf("expected %d but got %d", expected, resp.Data["count"])
if reload.checkCount {
actual, err := resp.Data["count"].(json.Number).Int64()
if err != nil {
t.Fatal(err)
}
if expected := []string{"plugin-test"}; !reflect.DeepEqual(expected, resp.Data["connections"]) {
if expected := 1; expected != int(actual) {
t.Fatalf("expected %d but got %d", expected, resp.Data["count"].(int))
}
if expected := []any{"plugin-test"}; !reflect.DeepEqual(expected, resp.Data["connections"]) {
t.Fatalf("expected %v but got %v", expected, resp.Data["connections"])
}
}
}
// Get creds
data = map[string]interface{}{}
req = &logical.Request{
Operation: logical.ReadOperation,
Path: "creds/plugin-role-test",
Storage: config.StorageView,
Data: data,
}
credsResp, err := b.HandleRequest(namespace.RootContext(nil), req)
if err != nil || (credsResp != nil && credsResp.IsError()) {
credsResp, err := client.Read("database/creds/plugin-role-test")
if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, credsResp)
}
credCheckURL := dbutil.QueryHelper(connURL, map[string]string{
credCheckURL := dbutil.QueryHelper(templatedConnURL, map[string]string{
"username": "postgres",
"password": "secret",
})
if !testCredsExist(t, credsResp, credCheckURL) {
if !testCredsExist(t, credsResp.Data, credCheckURL) {
t.Fatalf("Creds should exist")
}
// Delete Connection
data = map[string]interface{}{}
req = &logical.Request{
Operation: logical.DeleteOperation,
Path: "config/plugin-test",
Storage: config.StorageView,
Data: data,
}
resp, err = b.HandleRequest(namespace.RootContext(nil), req)
if err != nil || (resp != nil && resp.IsError()) {
resp, err = client.Delete("database/config/plugin-test")
if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
// Read connection
req.Operation = logical.ReadOperation
resp, err = b.HandleRequest(namespace.RootContext(nil), req)
if err != nil || (resp != nil && resp.IsError()) {
resp, err = client.Read("database/config/plugin-test")
if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
@@ -1190,7 +1185,7 @@ func TestBackend_allowedRoles(t *testing.T) {
t.Fatalf("err:%s resp:%#v\n", err, credsResp)
}
if !testCredsExist(t, credsResp, connURL) {
if !testCredsExist(t, credsResp.Data, connURL) {
t.Fatalf("Creds should exist")
}
@@ -1224,7 +1219,7 @@ func TestBackend_allowedRoles(t *testing.T) {
t.Fatalf("err:%s resp:%#v\n", err, credsResp)
}
if !testCredsExist(t, credsResp, connURL) {
if !testCredsExist(t, credsResp.Data, connURL) {
t.Fatalf("Creds should exist")
}
@@ -1271,7 +1266,7 @@ func TestBackend_allowedRoles(t *testing.T) {
t.Fatalf("err:%s resp:%#v\n", err, credsResp)
}
if !testCredsExist(t, credsResp, connURL) {
if !testCredsExist(t, credsResp.Data, connURL) {
t.Fatalf("Creds should exist")
}
}
@@ -1581,13 +1576,13 @@ func TestNewDatabaseWrapper_IgnoresBuiltinVersion(t *testing.T) {
}
}
func testCredsExist(t *testing.T, resp *logical.Response, connURL string) bool {
func testCredsExist(t *testing.T, data map[string]any, connURL string) bool {
t.Helper()
var d struct {
Username string `mapstructure:"username"`
Password string `mapstructure:"password"`
}
if err := mapstructure.Decode(resp.Data, &d); err != nil {
if err := mapstructure.Decode(data, &d); err != nil {
t.Fatal(err)
}
log.Printf("[TRACE] Generated credentials: %v", d)

View File

@@ -25,9 +25,10 @@ func TestPlugin_lifecycle(t *testing.T) {
cluster, sys := getCluster(t)
defer cluster.Cleanup()
vault.TestAddTestPlugin(t, cluster.Cores[0].Core, "mock-v4-database-plugin", consts.PluginTypeDatabase, "", "TestBackend_PluginMain_MockV4", []string{})
vault.TestAddTestPlugin(t, cluster.Cores[0].Core, "mock-v5-database-plugin", consts.PluginTypeDatabase, "", "TestBackend_PluginMain_MockV5", []string{})
vault.TestAddTestPlugin(t, cluster.Cores[0].Core, "mock-v6-database-plugin-muxed", consts.PluginTypeDatabase, "", "TestBackend_PluginMain_MockV6Multiplexed", []string{})
env := []string{fmt.Sprintf("%s=%s", pluginutil.PluginCACertPEMEnv, cluster.CACertPEMFile)}
vault.TestAddTestPlugin(t, cluster.Cores[0].Core, "mock-v4-database-plugin", consts.PluginTypeDatabase, "", "TestBackend_PluginMain_MockV4", env)
vault.TestAddTestPlugin(t, cluster.Cores[0].Core, "mock-v5-database-plugin", consts.PluginTypeDatabase, "", "TestBackend_PluginMain_MockV5", env)
vault.TestAddTestPlugin(t, cluster.Cores[0].Core, "mock-v6-database-plugin-muxed", consts.PluginTypeDatabase, "", "TestBackend_PluginMain_MockV6Multiplexed", env)
config := logical.TestBackendConfig()
config.StorageView = &logical.InmemStorage{}

View File

@@ -140,9 +140,8 @@ func testConfig(t *testing.T, pluginCmd string) (*logical.BackendConfig, func())
},
}
os.Setenv(pluginutil.PluginCACertPEMEnv, cluster.CACertPEMFile)
vault.TestAddTestPlugin(t, core.Core, "mock-plugin", consts.PluginTypeSecrets, "", pluginCmd, []string{})
vault.TestAddTestPlugin(t, core.Core, "mock-plugin", consts.PluginTypeSecrets, "", pluginCmd,
[]string{fmt.Sprintf("%s=%s", pluginutil.PluginCACertPEMEnv, cluster.CACertPEMFile)})
return config, func() {
cluster.Cleanup()

3
changelog/18513.txt Normal file
View File

@@ -0,0 +1,3 @@
```release-note:improvement
ui: The latest version of Chrome does not automatically redirect back to the app after authentication unless triggered by the user, so a link was added to redirect back to the app.
```

6
changelog/24512.txt Normal file
View File

@@ -0,0 +1,6 @@
```release-note:change
plugins: Add a warning to the response from sys/plugins/reload/backend if no plugins were reloaded.
```
```release-note:improvement
secrets/database: Support reloading named database plugins using the sys/plugins/reload/backend API endpoint.
```
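
As a quick sketch of the reload behaviour described above (mirroring the call the updated TestBackend_connectionCrud makes through the cluster client; the plugin name is the one used in the tests and is assumed to be registered):

```go
package main

import (
	"log"

	"github.com/hashicorp/vault/api"
)

func main() {
	// Assumes VAULT_ADDR and VAULT_TOKEN point at a cluster where the database
	// secrets engine is mounted and postgresql-database-plugin is in use.
	client, err := api.NewClient(api.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	// Reload the named database plugin via sys/plugins/reload/backend.
	secret, err := client.Logical().Write("sys/plugins/reload/backend", map[string]interface{}{
		"plugin": "postgresql-database-plugin",
	})
	if err != nil {
		log.Fatal(err)
	}

	// Per the change above, the response now carries a warning when no plugins
	// were reloaded; warnings are surfaced on the returned secret.
	if secret != nil && len(secret.Warnings) > 0 {
		log.Printf("reload warnings: %v", secret.Warnings)
	}
}
```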

3
changelog/24660.txt Normal file
View File

@@ -0,0 +1,3 @@
```release-note:bug
ui: The UI can now be used to create or update database roles by an operator without permission on the database connection.
```

3
changelog/24686.txt Normal file
View File

@@ -0,0 +1,3 @@
```release-note:bug
ui: fix incorrectly calculated capabilities on PKI issuer endpoints
```

3
changelog/24697.txt Normal file
View File

@@ -0,0 +1,3 @@
```release-note:bug
ui: Fixes input for jwks_ca_pem when configuring a JWT auth method
```

20
go.mod
View File

@@ -99,7 +99,7 @@ require (
github.com/hashicorp/go-kms-wrapping/wrappers/ocikms/v2 v2.0.7
github.com/hashicorp/go-kms-wrapping/wrappers/transit/v2 v2.0.8
github.com/hashicorp/go-memdb v1.3.4
github.com/hashicorp/go-msgpack v1.1.5
github.com/hashicorp/go-metrics v0.5.1
github.com/hashicorp/go-multierror v1.1.1
github.com/hashicorp/go-plugin v1.6.0
github.com/hashicorp/go-raftchunking v0.6.3-0.20191002164813-7e9e8525653a
@@ -120,16 +120,16 @@ require (
github.com/hashicorp/go-syslog v1.0.0
github.com/hashicorp/go-uuid v1.0.3
github.com/hashicorp/go-version v1.6.0
github.com/hashicorp/golang-lru v0.5.4
github.com/hashicorp/golang-lru v1.0.2
github.com/hashicorp/hcl v1.0.1-vault-5
github.com/hashicorp/hcl/v2 v2.16.2
github.com/hashicorp/hcp-link v0.2.1
github.com/hashicorp/hcp-scada-provider v0.2.2
github.com/hashicorp/hcp-sdk-go v0.23.0
github.com/hashicorp/nomad/api v0.0.0-20230519153805-2275a83cbfdf
github.com/hashicorp/raft v1.4.0
github.com/hashicorp/raft v1.6.0
github.com/hashicorp/raft-autopilot v0.2.0
github.com/hashicorp/raft-boltdb/v2 v2.2.2
github.com/hashicorp/raft-boltdb/v2 v2.3.0
github.com/hashicorp/raft-snapshot v1.0.4
github.com/hashicorp/raft-wal v0.4.0
github.com/hashicorp/vault-plugin-auth-alicloud v0.16.0
@@ -221,11 +221,11 @@ require (
golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63
golang.org/x/net v0.17.0
golang.org/x/oauth2 v0.11.0
golang.org/x/sync v0.3.0
golang.org/x/sync v0.4.0
golang.org/x/sys v0.15.0
golang.org/x/term v0.15.0
golang.org/x/text v0.14.0
golang.org/x/tools v0.12.1-0.20230815132531-74c255bcf846
golang.org/x/tools v0.14.0
google.golang.org/api v0.139.0
google.golang.org/grpc v1.58.3
google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0
@@ -339,7 +339,7 @@ require (
github.com/docker/go-connections v0.4.0 // indirect
github.com/docker/go-units v0.5.0 // indirect
github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 // indirect
github.com/dvsekhvalnov/jose2go v1.5.0 // indirect
github.com/dvsekhvalnov/jose2go v1.6.0 // indirect
github.com/emicklei/go-restful/v3 v3.10.1 // indirect
github.com/emirpasic/gods v1.18.1 // indirect
github.com/envoyproxy/go-control-plane v0.11.1 // indirect
@@ -391,8 +391,7 @@ require (
github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed // indirect
github.com/hashicorp/cronexpr v1.1.1 // indirect
github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
github.com/hashicorp/go-metrics v0.5.1 // indirect
github.com/hashicorp/go-msgpack/v2 v2.1.0 // indirect
github.com/hashicorp/go-msgpack/v2 v2.1.1 // indirect
github.com/hashicorp/go-secure-stdlib/fileutil v0.1.0 // indirect
github.com/hashicorp/go-secure-stdlib/plugincontainer v0.3.0 // indirect
github.com/hashicorp/go-slug v0.12.1 // indirect
@@ -401,7 +400,6 @@ require (
github.com/hashicorp/logutils v1.0.0 // indirect
github.com/hashicorp/mdns v1.0.4 // indirect
github.com/hashicorp/net-rpc-msgpackrpc/v2 v2.0.0 // indirect
github.com/hashicorp/raft-boltdb v0.0.0-20230125174641-2a8082862702 // indirect
github.com/hashicorp/serf v0.10.1 // indirect
github.com/hashicorp/vault/api/auth/kubernetes v0.5.0 // indirect
github.com/hashicorp/vic v1.5.1-0.20190403131502-bbfe86ec9443 // indirect
@@ -522,7 +520,7 @@ require (
go.uber.org/multierr v1.7.0 // indirect
go.uber.org/zap v1.19.1 // indirect
golang.org/x/exp/typeparams v0.0.0-20221208152030-732eee02a75a // indirect
golang.org/x/mod v0.12.0 // indirect
golang.org/x/mod v0.13.0 // indirect
golang.org/x/time v0.3.0 // indirect
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect
google.golang.org/appengine v1.6.7 // indirect

33
go.sum
View File

@@ -1149,7 +1149,6 @@ github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
github.com/armon/go-metrics v0.0.0-20190430140413-ec5e00d3c878/go.mod h1:3AMJUQhVx52RsWOnlkpikZr01T/yAVN2gn0861vByNg=
github.com/armon/go-metrics v0.3.0/go.mod h1:zXjbSimjXTd7vOpY8B0/2LpvNvDoXBuplAD+gJD3GYs=
github.com/armon/go-metrics v0.3.8/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc=
github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
@@ -1587,8 +1586,9 @@ github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:Htrtb
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/dvsekhvalnov/jose2go v1.5.0 h1:3j8ya4Z4kMCwT5nXIKFSV84YS+HdqSSO0VsTQxaLAeM=
github.com/dvsekhvalnov/jose2go v1.5.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU=
github.com/dvsekhvalnov/jose2go v1.6.0 h1:Y9gnSnP4qEI0+/uQkHvFXeD2PLPJeXEL+ySMEA2EjTY=
github.com/dvsekhvalnov/jose2go v1.6.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU=
github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
github.com/elazarl/goproxy v0.0.0-20221015165544-a0805db90819 h1:RIB4cRk+lBqKK3Oy0r2gRX4ui7tuhiZq2SuTtTCi0/0=
github.com/elazarl/goproxy v0.0.0-20221015165544-a0805db90819/go.mod h1:Ro8st/ElPeALwNFlcTpWmkr6IoMFfkjXAvTHpevnDsM=
@@ -2212,8 +2212,8 @@ github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iP
github.com/hashicorp/go-msgpack v0.5.5/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
github.com/hashicorp/go-msgpack v1.1.5 h1:9byZdVjKTe5mce63pRVNP1L7UAmdHOTEMGehn6KvJWs=
github.com/hashicorp/go-msgpack v1.1.5/go.mod h1:gWVc3sv/wbDmR3rQsj1CAktEZzoz1YNK9NfGLXJ69/4=
github.com/hashicorp/go-msgpack/v2 v2.1.0 h1:J2g2hMyjSefUPTnkLRU2MnsLLsPRB1n4Z/wJRN07GuA=
github.com/hashicorp/go-msgpack/v2 v2.1.0/go.mod h1:Tv81cKI2JmHZDjmzEmc1n+8h1DO5k+3pG6BPlNMQds0=
github.com/hashicorp/go-msgpack/v2 v2.1.1 h1:xQEY9yB2wnHitoSzk/B9UjXWRQ67QKu5AOm8aFp8N3I=
github.com/hashicorp/go-msgpack/v2 v2.1.1/go.mod h1:upybraOAblm4S7rx0+jeNy+CWWhzywQsSRV5033mMu4=
github.com/hashicorp/go-multierror v0.0.0-20161216184304-ed905158d874/go.mod h1:JMRHfdO9jKNzS/+BTlxCjKNQHg/jZAft8U7LloJvN7I=
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA=
@@ -2282,8 +2282,9 @@ github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09
github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc=
github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iPY6p1c=
github.com/hashicorp/golang-lru v1.0.2/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/hashicorp/hcl v1.0.1-vault-5 h1:kI3hhbbyzr4dldA8UdTb7ZlVVlI2DACdCfz31RPDgJM=
github.com/hashicorp/hcl v1.0.1-vault-5/go.mod h1:XYhtn6ijBSAj6n4YqAaf7RBPS4I06AItNorpy+MoQNM=
@@ -2311,19 +2312,17 @@ github.com/hashicorp/net-rpc-msgpackrpc/v2 v2.0.0/go.mod h1:6pdNz0vo0mF0GvhwDG56
github.com/hashicorp/nomad/api v0.0.0-20230519153805-2275a83cbfdf h1:cKXVf1UJqwdkGiTF3idqCOLApAql0310OSmJxeiaMWg=
github.com/hashicorp/nomad/api v0.0.0-20230519153805-2275a83cbfdf/go.mod h1:rb38DqjaaIfhJRiLeCAGgIt+wV7o78rB+liyFE3mVzE=
github.com/hashicorp/raft v1.0.1/go.mod h1:DVSAWItjLjTOkVbSpWQ0j0kUADIvDaCtBxIcbNAQLkI=
github.com/hashicorp/raft v1.1.0/go.mod h1:4Ak7FSPnuvmb0GV6vgIAJ4vYT4bek9bb6Q+7HVbyzqM=
github.com/hashicorp/raft v1.1.2-0.20191002163536-9c6bd3e3eb17/go.mod h1:vPAJM8Asw6u8LxC3eJCUZmRP/E4QmUGE1R7g7k8sG/8=
github.com/hashicorp/raft v1.2.0/go.mod h1:vPAJM8Asw6u8LxC3eJCUZmRP/E4QmUGE1R7g7k8sG/8=
github.com/hashicorp/raft v1.4.0 h1:tn28S/AWv0BtRQgwZv/1NELu8sCvI0FixqL8C8MYKeY=
github.com/hashicorp/raft v1.4.0/go.mod h1:nz64BIjXphDLATfKGG5RzHtNUPioLeKFsXEm88yTVew=
github.com/hashicorp/raft v1.6.0 h1:tkIAORZy2GbJ2Trp5eUSggLXDPOJLXC+JJLNMMqtgtM=
github.com/hashicorp/raft v1.6.0/go.mod h1:Xil5pDgeGwRWuX4uPUmwa+7Vagg4N804dz6mhNi6S7o=
github.com/hashicorp/raft-autopilot v0.2.0 h1:2/R2RPgamgRKgNWGQioULZvjeKXQZmDuw5Ty+6c+H7Y=
github.com/hashicorp/raft-autopilot v0.2.0/go.mod h1:q6tZ8UAZ5xio2gv2JvjgmtOlh80M6ic8xQYBe2Egkg8=
github.com/hashicorp/raft-boltdb v0.0.0-20171010151810-6e5ba93211ea/go.mod h1:pNv7Wc3ycL6F5oOWn+tPGo2gWD4a5X+yp/ntwdKLjRk=
github.com/hashicorp/raft-boltdb v0.0.0-20210409134258-03c10cc3d4ea/go.mod h1:qRd6nFJYYS6Iqnc/8HcUmko2/2Gw8qTFEmxDLii6W5I=
github.com/hashicorp/raft-boltdb v0.0.0-20230125174641-2a8082862702 h1:RLKEcCuKcZ+qp2VlaaZsYZfLOmIiuJNpEi48Rl8u9cQ=
github.com/hashicorp/raft-boltdb v0.0.0-20230125174641-2a8082862702/go.mod h1:nTakvJ4XYq45UXtn0DbwR4aU9ZdjlnIenpbs6Cd+FM0=
github.com/hashicorp/raft-boltdb/v2 v2.2.2 h1:rlkPtOllgIcKLxVT4nutqlTH2NRFn+tO1wwZk/4Dxqw=
github.com/hashicorp/raft-boltdb/v2 v2.2.2/go.mod h1:N8YgaZgNJLpZC+h+by7vDu5rzsRgONThTEeUS3zWbfY=
github.com/hashicorp/raft-boltdb/v2 v2.3.0 h1:fPpQR1iGEVYjZ2OELvUHX600VAK5qmdnDEv3eXOwZUA=
github.com/hashicorp/raft-boltdb/v2 v2.3.0/go.mod h1:YHukhB04ChJsLHLJEUD6vjFyLX2L3dsX3wPBZcX4tmc=
github.com/hashicorp/raft-snapshot v1.0.4 h1:EuDuayAJPdiDmVk1ygTDnG2zDzrs0/6/yBuma1IYSow=
github.com/hashicorp/raft-snapshot v1.0.4/go.mod h1:5sL9eUn72lH5DzsFIJ9jaysITbHksSSszImWSOTC8Ic=
github.com/hashicorp/raft-wal v0.4.0 h1:oHCQLPa3gBTrfuBVHaDg2b/TVXpU0RIyeH/mU9ovk3Y=
@@ -3565,8 +3564,8 @@ golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.11.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.13.0 h1:I/DsJXRlw/8l/0c24sM9yb0T4z9liZTduXvdAWYiysY=
golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -3719,8 +3718,9 @@ golang.org/x/sync v0.0.0-20220819030929-7fc1605a5dde/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ=
golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sys v0.0.0-20170830134202-bb24a47a89ea/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -3965,7 +3965,6 @@ golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3
golang.org/x/tools v0.0.0-20190329151228-23e29df326fe/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190424220101-1e8e1cfdf96b/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
@@ -4048,8 +4047,8 @@ golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s=
golang.org/x/tools v0.8.0/go.mod h1:JxBZ99ISMI5ViVkT1tr6tdNmXeTrcpVSD3vZ1RsRdN4=
golang.org/x/tools v0.9.1/go.mod h1:owI94Op576fPu3cIGQeHs3joujW/2Oc6MtlxbF5dfNc=
golang.org/x/tools v0.10.0/go.mod h1:UJwyiVBsOA2uwvK/e5OY3GTpDUJriEd+/YlqAwLPmyM=
golang.org/x/tools v0.12.1-0.20230815132531-74c255bcf846 h1:Vve/L0v7CXXuxUmaMGIEK/dEeq7uiqb5qBgQrZzIE7E=
golang.org/x/tools v0.12.1-0.20230815132531-74c255bcf846/go.mod h1:Sc0INKfu04TlqNoRA1hgpFZbhYXHPr4V5DzpSBTPqQM=
golang.org/x/tools v0.14.0 h1:jvNa2pY0M4r62jkRQ6RwEZZyPcymeL9XZMLBbV7U2nc=
golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg=
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=

View File

@@ -535,7 +535,7 @@ func (n *NoopAudit) Invalidate(_ context.Context) {
// the audit.Backend interface.
func (n *NoopAudit) RegisterNodesAndPipeline(broker *eventlogger.Broker, name string) error {
for id, node := range n.nodeMap {
if err := broker.RegisterNode(id, node, eventlogger.WithNodeRegistrationPolicy(eventlogger.DenyOverwrite)); err != nil {
if err := broker.RegisterNode(id, node); err != nil {
return err
}
}
@@ -546,7 +546,7 @@ func (n *NoopAudit) RegisterNodesAndPipeline(broker *eventlogger.Broker, name st
NodeIDs: n.nodeIDList,
}
return broker.RegisterPipeline(pipeline, eventlogger.WithPipelineRegistrationPolicy(eventlogger.DenyOverwrite))
return broker.RegisterPipeline(pipeline)
}
type TestLogger struct {
@@ -632,3 +632,7 @@ func (n *NoopAudit) Nodes() map[eventlogger.NodeID]eventlogger.Node {
func (n *NoopAudit) NodeIDs() []eventlogger.NodeID {
return n.nodeIDList
}
func (n *NoopAudit) IsFallback() bool {
return false
}

View File

@@ -5,6 +5,7 @@ package http
import (
"encoding/json"
"fmt"
"io/ioutil"
"os"
"reflect"
@@ -55,10 +56,9 @@ func getPluginClusterAndCore(t *testing.T, logger log.Logger) (*vault.TestCluste
cores := cluster.Cores
core := cores[0]
os.Setenv(pluginutil.PluginCACertPEMEnv, cluster.CACertPEMFile)
vault.TestWaitActive(benchhelpers.TBtoT(t), core.Core)
vault.TestAddTestPlugin(benchhelpers.TBtoT(t), core.Core, "mock-plugin", consts.PluginTypeSecrets, "", "TestPlugin_PluginMain", []string{})
vault.TestAddTestPlugin(benchhelpers.TBtoT(t), core.Core, "mock-plugin", consts.PluginTypeSecrets, "", "TestPlugin_PluginMain",
[]string{fmt.Sprintf("%s=%s", pluginutil.PluginCACertPEMEnv, cluster.CACertPEMFile)})
// Mount the mock plugin
err = core.Client.Sys().Mount("mock", &api.MountInput{

View File

@@ -1,13 +0,0 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package raft
// If we downgrade msgpack from v1.1.5 to v0.5.5, everything will still
// work, but any pre-existing raft clusters will break on upgrade.
// This file exists so that the Vault project has an explicit dependency
// on the library, which allows us to pin the version in go.mod.
import (
_ "github.com/hashicorp/go-msgpack/codec"
)

View File

@@ -435,8 +435,9 @@ func NewRaftBackend(conf map[string]string, logger log.Logger) (physical.Backend
// use the traditional BoltDB setup
opts := etcdboltOptions(dbPath)
raftOptions := raftboltdb.Options{
Path: dbPath,
BoltOptions: opts,
Path: dbPath,
BoltOptions: opts,
MsgpackUseNewTimeFormat: true,
}
store, err := raftboltdb.New(raftOptions)
@@ -1097,11 +1098,12 @@ func (b *RaftBackend) SetupCluster(ctx context.Context, opts SetupOpts) error {
return err
}
transConfig := &raft.NetworkTransportConfig{
Stream: streamLayer,
MaxPool: 3,
Timeout: 10 * time.Second,
ServerAddressProvider: b.serverAddressProvider,
Logger: b.logger.Named("raft-net"),
Stream: streamLayer,
MaxPool: 3,
Timeout: 10 * time.Second,
ServerAddressProvider: b.serverAddressProvider,
Logger: b.logger.Named("raft-net"),
MsgpackUseNewTimeFormat: true,
}
transport := raft.NewNetworkTransportWithConfig(transConfig)

View File

@@ -46,9 +46,9 @@ require (
github.com/ryanuber/go-glob v1.0.0
github.com/stretchr/testify v1.8.3
go.uber.org/atomic v1.9.0
golang.org/x/crypto v0.14.0
golang.org/x/crypto v0.17.0
golang.org/x/net v0.17.0
golang.org/x/text v0.13.0
golang.org/x/text v0.14.0
google.golang.org/grpc v1.57.2
google.golang.org/protobuf v1.31.0
)
@@ -104,8 +104,8 @@ require (
go.opencensus.io v0.24.0 // indirect
golang.org/x/mod v0.9.0 // indirect
golang.org/x/oauth2 v0.11.0 // indirect
golang.org/x/sys v0.13.0 // indirect
golang.org/x/term v0.13.0 // indirect
golang.org/x/sys v0.15.0 // indirect
golang.org/x/term v0.15.0 // indirect
golang.org/x/time v0.3.0 // indirect
golang.org/x/tools v0.7.0 // indirect
google.golang.org/api v0.134.0 // indirect

View File

@@ -543,8 +543,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y
golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k=
golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -683,14 +683,14 @@ golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc=
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek=
golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4=
golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -702,8 +702,8 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=

View File

@@ -164,7 +164,7 @@ export default ApplicationAdapter.extend({
db: db[0],
});
} catch (e) {
throw new Error('Could not update allowed roles for selected database. Check Vault logs for details');
this.checkError(e);
}
return this.ajax(this.urlFor(backend, id, roleType), 'POST', { data }).then(() => {
@@ -180,12 +180,16 @@ export default ApplicationAdapter.extend({
const backend = snapshot.attr('backend');
const id = snapshot.attr('name');
const db = snapshot.attr('database');
await this._updateAllowedRoles(store, {
role: id,
backend,
db: db[0],
type: 'remove',
});
try {
await this._updateAllowedRoles(store, {
role: id,
backend,
db: db[0],
type: 'remove',
});
} catch (e) {
this.checkError(e);
}
return this.ajax(this.urlFor(backend, id, roleType), 'DELETE');
},
@@ -199,4 +203,14 @@ export default ApplicationAdapter.extend({
return this.ajax(this.urlFor(backend, id, roleType), 'POST', { data }).then(() => data);
},
checkError(e) {
if (e.httpStatus === 403) {
// The user does not have the permission to update the connection. This
// can happen if their permissions are limited to the role. In that case
// we ignore the error and continue updating the role.
return;
}
throw new Error(`Could not update allowed roles for selected database: ${e.errors.join(', ')}`);
},
});
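
The pattern above lets a role save proceed even when the caller lacks permission to update the parent database connection: a 403 from _updateAllowedRoles is swallowed, while any other failure surfaces with the API's error messages. A minimal sketch of the same idea, using hypothetical helper names:

// Hypothetical sketch of the permission-tolerant update used by the adapter above.
async function saveRole(adapter, store, { backend, name, db, data, roleType }) {
  try {
    // May 403 if the token's policy covers the role but not the connection.
    await adapter._updateAllowedRoles(store, { role: name, backend, db });
  } catch (e) {
    if (e.httpStatus !== 403) {
      // Anything other than a permission error is a real failure.
      throw new Error(`Could not update allowed roles for selected database: ${e.errors.join(', ')}`);
    }
    // 403: ignore and continue saving the role itself.
  }
  return adapter.ajax(adapter.urlFor(backend, name, roleType), 'POST', { data });
}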

View File

@@ -27,9 +27,6 @@ export default class DatabaseRoleEdit extends Component {
get warningMessages() {
const warnings = {};
if (this.args.model.canUpdateDb === false) {
warnings.database = `You dont have permissions to update this database connection, so this role cannot be created.`;
}
if (
(this.args.model.type === 'dynamic' && this.args.model.canCreateDynamic === false) ||
(this.args.model.type === 'static' && this.args.model.canCreateStatic === false)

View File

@@ -24,12 +24,31 @@ export default AuthConfig.extend({
oidcClientSecret: attr('string', {
label: 'OIDC client secret',
}),
oidcDiscoveryCaPem: attr('string', {
label: 'OIDC discovery CA PEM',
editType: 'file',
helpText:
'The CA certificate or chain of certificates, in PEM format, to use to validate connections to the OIDC Discovery URL. If not set, system certificates are used',
}),
jwksCaPem: attr('string', {
label: 'JWKS CA PEM',
editType: 'file',
}),
jwksUrl: attr('string', {
label: 'JWKS URL',
}),
oidcResponseMode: attr('string', {
label: 'OIDC response mode',
}),
oidcResponseTypes: attr('string', {
label: 'OIDC response types',
}),
jwtValidationPubkeys: attr({
label: 'JWT validation public keys',
editType: 'stringArray',
@@ -38,14 +57,23 @@ export default AuthConfig.extend({
jwtSupportedAlgs: attr({
label: 'JWT supported algorithms',
}),
boundIssuer: attr('string', {
helpText: 'The value against which to match the iss claim in a JWT',
}),
fieldGroups: computed('constructor.modelName', 'newFields', function () {
const type = this.constructor.modelName.split('/')[1].toUpperCase();
let groups = [
{
default: ['oidcDiscoveryUrl', 'defaultRole'],
default: [
'oidcDiscoveryUrl',
'defaultRole',
'jwksCaPem',
'jwksUrl',
'oidcResponseMode',
'oidcResponseTypes',
],
},
{
[`${type} Options`]: [

View File

@@ -10,8 +10,8 @@ import lazyCapabilities, { apiPath } from 'vault/macros/lazy-capabilities';
export default Model.extend({
approved: attr('boolean'),
requestPath: attr('string'),
requestEntity: belongsTo('identity/entity', { async: false }),
authorizations: hasMany('identity/entity', { async: false }),
requestEntity: belongsTo('identity/entity', { async: false, inverse: null }),
authorizations: hasMany('identity/entity', { async: false, inverse: null }),
authorizePath: lazyCapabilities(apiPath`sys/control-group/authorize`),
canAuthorize: alias('authorizePath.canUpdate'),

View File

@@ -14,7 +14,7 @@ export default IdentityModel.extend({
formFields: computed(function () {
return ['name', 'mountAccessor'];
}),
entity: belongsTo('identity/entity', { readOnly: true, async: false }),
entity: belongsTo('identity/entity', { readOnly: true, async: false, inverse: 'aliases' }),
name: attr('string'),
canonicalId: attr('string'),

View File

@@ -34,7 +34,7 @@ export default IdentityModel.extend({
lastUpdateTime: attr('string', {
readOnly: true,
}),
aliases: hasMany('identity/entity-alias', { async: false, readOnly: true }),
aliases: hasMany('identity/entity-alias', { async: false, readOnly: true, inverse: 'entity' }),
groupIds: attr({
readOnly: true,
}),
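
This and the neighboring identity, KMIP, and MFA model edits all pin down the async and inverse options that newer ember-data releases expect to be explicit on every belongsTo/hasMany. The two sides of a bidirectional relationship name each other's attribute, and one-way references declare inverse: null; for example (attribute shapes taken from the models above):

// identity/entity: names the attribute on the alias model that points back here.
aliases: hasMany('identity/entity-alias', { async: false, readOnly: true, inverse: 'entity' }),

// identity/entity-alias: points back at the entity's `aliases` collection.
entity: belongsTo('identity/entity', { readOnly: true, async: false, inverse: 'aliases' }),

// A one-way reference with no attribute on the other side uses inverse: null.
requestEntity: belongsTo('identity/entity', { async: false, inverse: null }),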

View File

@@ -77,7 +77,7 @@ export default IdentityModel.extend({
),
policyPath: lazyCapabilities(apiPath`sys/policies`),
canCreatePolicies: alias('policyPath.canCreate'),
alias: belongsTo('identity/group-alias', { async: false, readOnly: true }),
alias: belongsTo('identity/group-alias', { async: false, readOnly: true, inverse: 'group' }),
updatePath: identityCapabilities(),
canDelete: alias('updatePath.canDelete'),
canEdit: alias('updatePath.canUpdate'),

View File

@@ -6,7 +6,7 @@
import Model, { belongsTo, attr } from '@ember-data/model';
export default Model.extend({
config: belongsTo('kmip/config', { async: false }),
config: belongsTo('kmip/config', { async: false, inverse: 'ca' }),
caPem: attr('string', {
label: 'CA PEM',
}),

View File

@@ -10,7 +10,7 @@ import fieldToAttrs from 'vault/utils/field-to-attrs';
export default Model.extend({
useOpenAPI: true,
ca: belongsTo('kmip/ca', { async: false }),
ca: belongsTo('kmip/ca', { async: false, inverse: 'config' }),
getHelpUrl(path) {
return `/v1/${path}/config?help=1`;
},

View File

@@ -37,12 +37,12 @@ const validations = {
export default class MfaLoginEnforcementModel extends Model {
@service store;
@attr('string') name;
@hasMany('mfa-method') mfa_methods;
@hasMany('mfa-method', { async: true, inverse: null }) mfa_methods;
@attr('string') namespace_id;
@attr('array', { defaultValue: () => [] }) auth_method_accessors; // ["auth_approle_17a552c6"]
@attr('array', { defaultValue: () => [] }) auth_method_types; // ["userpass"]
@hasMany('identity/entity') identity_entities;
@hasMany('identity/group') identity_groups;
@hasMany('identity/entity', { async: true, inverse: null }) identity_entities;
@hasMany('identity/group', { async: true, inverse: null }) identity_groups;
get targets() {
return ArrayProxy.extend(PromiseProxyMixin).create({

View File

@@ -135,13 +135,14 @@ export default class PkiIssuerModel extends Model {
@attr importedKeys;
@attr mapping;
@lazyCapabilities(apiPath`${'backend'}/issuer/${'issuerId'}`) issuerPath;
@lazyCapabilities(apiPath`${'backend'}/root/rotate/exported`) rotateExported;
@lazyCapabilities(apiPath`${'backend'}/root/rotate/internal`) rotateInternal;
@lazyCapabilities(apiPath`${'backend'}/root/rotate/existing`) rotateExisting;
@lazyCapabilities(apiPath`${'backend'}/issuer/${'issuerId'}`, 'backend', 'issuerId') issuerPath;
@lazyCapabilities(apiPath`${'backend'}/root/rotate/exported`, 'backend') rotateExported;
@lazyCapabilities(apiPath`${'backend'}/root/rotate/internal`, 'backend') rotateInternal;
@lazyCapabilities(apiPath`${'backend'}/root/rotate/existing`, 'backend') rotateExisting;
@lazyCapabilities(apiPath`${'backend'}/root`, 'backend') deletePath;
@lazyCapabilities(apiPath`${'backend'}/intermediate/cross-sign`) crossSignPath;
@lazyCapabilities(apiPath`${'backend'}/issuer/${'issuerId'}/sign-intermediate`) signIntermediate;
@lazyCapabilities(apiPath`${'backend'}/intermediate/cross-sign`, 'backend') crossSignPath;
@lazyCapabilities(apiPath`${'backend'}/issuer/${'issuerId'}/sign-intermediate`, 'backend', 'issuerId')
signIntermediate;
get canRotateIssuer() {
return (
this.rotateExported.get('canUpdate') !== false ||

View File

@@ -8,8 +8,8 @@ import { attr } from '@ember-data/model';
import { withFormFields } from 'vault/decorators/model-form-fields';
const displayFields = ['name', 'keyVaultUri', 'tenantId', 'cloud', 'clientId', 'clientSecret'];
const formFieldGroups = [
{ default: ['name', 'tenantId', 'cloud', 'clientId'] },
{ Credentials: ['keyVaultUri', 'clientSecret'] },
{ default: ['name', 'keyVaultUri', 'tenantId', 'cloud', 'clientId'] },
{ Credentials: ['clientSecret'] },
];
@withFormFields(displayFields, formFieldGroups)
export default class SyncDestinationsAzureKeyVaultModel extends SyncDestinationModel {

View File

@@ -94,17 +94,17 @@ export default class VaultClusterOidcProviderRoute extends Route {
_handleSuccess(response, baseUrl, state) {
const { code } = response;
const redirectUrl = this._buildUrl(baseUrl, { code, state });
if (Ember.testing) {
return { redirectUrl };
if (!Ember.testing) {
this.win.location.replace(redirectUrl);
}
this.win.location.replace(redirectUrl);
return { redirectUrl };
}
_handleError(errorResp, baseUrl) {
const redirectUrl = this._buildUrl(baseUrl, { ...errorResp });
if (Ember.testing) {
return { redirectUrl };
if (!Ember.testing) {
this.win.location.replace(redirectUrl);
}
this.win.location.replace(redirectUrl);
return { redirectUrl };
}
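
Both handlers now compute the redirect URL, perform the browser navigation only outside of tests, and always return the URL so the template (and acceptance tests) can render or inspect it instead of racing the redirect. A reduced sketch of the guard, with names taken from the surrounding route:

// Sketch: skip the real navigation under Ember.testing, but always hand the URL back.
_handleResponse(params, baseUrl) {
  const redirectUrl = this._buildUrl(baseUrl, params);
  if (!Ember.testing) {
    this.win.location.replace(redirectUrl);
  }
  return { redirectUrl };
}
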
/**

View File

@@ -110,6 +110,9 @@
}
}
.opacity-050 {
opacity: 0.5;
}
.opacity-060 {
opacity: 0.6;
}

View File

@@ -22,7 +22,9 @@
@onSuccess={{this._handleSuccess}}
/>
{{else if this.model.redirectUrl}}
<div data-test-oidc-redirect>{{this.model.redirectUrl}}</div>
<VaultLogoSpinner />
<p>If you are not automatically redirected,
<a href={{this.model.redirectUrl}} data-test-oidc-redirect>click here to go back to app.</a></p>
{{else}}
<VaultLogoSpinner />
{{/if}}

View File

@@ -6,7 +6,7 @@
<div>
<div class="field">
<p class="control has-icons-left has-icons-right">
<span class="input has-text-grey-light">{{or @placeholder "Search"}}</span>
<span class="input opacity-050">{{or @placeholder "Search"}}</span>
<Icon @name="search" class="search-icon has-text-grey-light" />
</p>
</div>

View File

@@ -6,10 +6,11 @@
<KvPageHeader @breadcrumbs={{@breadcrumbs}} @pageTitle={{@path}}>
<:syncDetails>
{{#if this.syncStatus}}
<Hds::Alert data-test-sync-alert @type="page" @color="neutral" @icon={{false}} as |A|>
<Hds::Alert data-test-sync-alert @type="page" @color="neutral" as |A|>
<A.Title>
This secret has been synced from Vault to other destinations, updates to the secret will get automatically synced
to destinations.
This secret has been synced from Vault to
{{pluralize this.syncStatus.length "destination"}}. Updates to this secret will automatically sync to its
{{if (eq this.syncStatus.length 1) "destination" "destinations"}}.
</A.Title>
{{#each this.syncStatus as |status|}}

View File

@@ -8,7 +8,8 @@
"ember-cli-htmlbars": "*",
"ember-cli-babel": "*",
"ember-concurrency": "*",
"@ember/test-waiters": "*"
"@ember/test-waiters": "*",
"ember-inflector": "*"
},
"ember-addon": {
"paths": [

View File

@@ -28,18 +28,17 @@
class="is-marginless"
data-test-filter="type"
/>
<SearchSelect
@options={{this.destinationNames}}
@objectKeys={{array "id" "name"}}
@passObject={{true}}
@selectLimit={{1}}
@disallowNewItems={{true}}
@placeholder="Filter by name"
@inputValue={{if @nameFilter (array @nameFilter)}}
@onChange={{fn this.onFilterChange "name"}}
class="is-marginless has-left-padding-s"
data-test-filter="name"
/>
<div class="has-left-margin-s">
<FilterInput
id="name-filter"
aria-label="Filter by name"
placeholder="Filter by name"
value={{@nameFilter}}
data-test-filter="name"
@autofocus={{true}}
@onInput={{fn this.onFilterChange "name"}}
/>
</div>
</ToolbarFilters>
<ToolbarActions>
<ToolbarLink @route="secrets.destinations.create" @type="add" data-test-create-destination>

View File

@@ -9,6 +9,7 @@ import { action } from '@ember/object';
import { getOwner } from '@ember/application';
import errorMessage from 'vault/utils/error-message';
import { findDestination, syncDestinations } from 'core/helpers/sync-destinations';
import { next } from '@ember/runloop';
import type SyncDestinationModel from 'vault/vault/models/sync/destination';
import type RouterService from '@ember/routing/router-service';
@@ -16,6 +17,7 @@ import type StoreService from 'vault/services/store';
import type FlashMessageService from 'vault/services/flash-messages';
import type { EngineOwner } from 'vault/vault/app-types';
import type { SyncDestinationName, SyncDestinationType } from 'vault/vault/helpers/sync-destinations';
import type Transition from '@ember/routing/transition';
interface Args {
destinations: Array<SyncDestinationModel>;
@@ -28,15 +30,31 @@ export default class SyncSecretsDestinationsPageComponent extends Component<Args
@service declare readonly store: StoreService;
@service declare readonly flashMessages: FlashMessageService;
// for some reason there isn't a full page refresh happening when transitioning on filter change
// when the transition happens it causes the FilterInput component to lose focus since it can only focus on didInsert
// to work around this, verify that a transition from this route was completed and then focus the input
constructor(owner: unknown, args: Args) {
super(owner, args);
this.router.on('routeDidChange', this.focusNameFilter);
}
willDestroy(): void {
super.willDestroy();
this.router.off('routeDidChange', this.focusNameFilter);
}
focusNameFilter(transition?: Transition) {
const route = 'vault.cluster.sync.secrets.destinations.index';
if (transition?.from?.name === route && transition?.to?.name === route) {
next(() => document.getElementById('name-filter')?.focus());
}
}
// typeFilter arg comes in as destination type but we need to pass the destination display name into the SearchSelect
get typeFilterName() {
return findDestination(this.args.typeFilter)?.name;
}
get destinationNames() {
return this.args.destinations.map((destination) => ({ id: destination.name, name: destination.name }));
}
get destinationTypes() {
return syncDestinations().map((d) => ({ id: d.name, name: d.type }));
}
@@ -65,9 +83,10 @@ export default class SyncSecretsDestinationsPageComponent extends Component<Args
}
@action
onFilterChange(key: string, selectObject: Array<{ id: string; name: string } | undefined>) {
onFilterChange(key: string, value: { id: string; name: string }[] | string | undefined) {
const queryValue = Array.isArray(value) ? value[0]?.name : value;
this.router.transitionTo('vault.cluster.sync.secrets.destinations', {
queryParams: { [key]: selectObject[0]?.name },
queryParams: { [key]: queryValue },
});
}
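
The comments above describe the workaround this component depends on: a filter change triggers a route transition rather than a full refresh, the re-render replaces the FilterInput, and the input only focuses itself on didInsert, so focus is lost. The component therefore listens for routeDidChange and refocuses the field on the next runloop whenever the transition stayed on this route. An untyped sketch of that wiring, using the same element id and route name:

// Sketch: re-apply focus after a same-route transition re-renders the filter input.
constructor(owner, args) {
  super(owner, args);
  this.router.on('routeDidChange', this.focusNameFilter);
}

willDestroy() {
  super.willDestroy();
  this.router.off('routeDidChange', this.focusNameFilter);
}

focusNameFilter(transition) {
  const route = 'vault.cluster.sync.secrets.destinations.index';
  if (transition?.from?.name === route && transition?.to?.name === route) {
    // next() defers until the re-render that stole focus has finished.
    next(() => document.getElementById('name-filter')?.focus());
  }
}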

View File

@@ -59,7 +59,7 @@ export default class DestinationsCreateForm extends Component<Args> {
@waitFor
*save(event: Event) {
event.preventDefault();
this.error = '';
// clear out validation warnings
this.modelValidations = null;
const { destination } = this.args;

View File

@@ -37,7 +37,7 @@
<p class="sub-text">
Select a KV engine mount and path to sync a secret to the
{{@destination.typeDisplayName}}
destination.
destination. Selecting a previously synced secret will re-sync that secret.
</p>
<div class="has-top-margin-l">

View File

@@ -104,7 +104,7 @@
<dd.Interactive
@route="secrets.destinations.destination.secrets"
@models={{array data.type data.name}}
@text="Details"
@text="View synced secrets"
data-test-overview-table-action="details"
/>
</Hds::Dropdown>

View File

@@ -8,12 +8,24 @@ import { inject as service } from '@ember/service';
import { hash } from 'rsvp';
import type StoreService from 'vault/services/store';
import SyncDestinationModel from 'vault/vault/models/sync/destination';
import type SyncDestinationModel from 'vault/vault/models/sync/destination';
import type SyncAssociationModel from 'vault/vault/models/sync/association';
import type Controller from '@ember/controller';
interface SyncDestinationSecretsRouteParams {
page: string;
}
interface SyncDestinationSecretsRouteModel {
destination: SyncDestinationModel;
associations: SyncAssociationModel[];
}
interface SyncDestinationSecretsController extends Controller {
model: SyncDestinationSecretsRouteModel;
page: number | undefined;
}
export default class SyncDestinationSecretsRoute extends Route {
@service declare readonly store: StoreService;
@@ -35,4 +47,10 @@ export default class SyncDestinationSecretsRoute extends Route {
}),
});
}
resetController(controller: SyncDestinationSecretsController, isExiting: boolean) {
if (isExiting) {
controller.set('page', undefined);
}
}
}

View File

@@ -11,6 +11,7 @@ import type StoreService from 'vault/services/store';
import type RouterService from '@ember/routing/router-service';
import type { ModelFrom } from 'vault/vault/route';
import type SyncDestinationModel from 'vault/vault/models/sync/destination';
import type Controller from '@ember/controller';
interface SyncSecretsDestinationsIndexRouteParams {
name: string;
@@ -18,6 +19,19 @@ interface SyncSecretsDestinationsIndexRouteParams {
page: string;
}
interface SyncSecretsDestinationsRouteModel {
destinations: SyncDestinationModel[];
nameFilter: string | undefined;
typeFilter: string | undefined;
}
interface SyncSecretsDestinationsController extends Controller {
model: SyncSecretsDestinationsRouteModel;
page: number | undefined;
name: number | undefined;
type: number | undefined;
}
export default class SyncSecretsDestinationsIndexRoute extends Route {
@service declare readonly store: StoreService;
@service declare readonly router: RouterService;
@@ -35,7 +49,7 @@ export default class SyncSecretsDestinationsIndexRoute extends Route {
};
redirect(model: ModelFrom<SyncSecretsDestinationsIndexRoute>) {
if (model.destinations.length === 0) {
if (!model.destinations.meta.total) {
this.router.transitionTo('vault.cluster.sync.secrets.overview');
}
}
@@ -43,7 +57,7 @@ export default class SyncSecretsDestinationsIndexRoute extends Route {
filterData(dataset: Array<SyncDestinationModel>, name: string, type: string): Array<SyncDestinationModel> {
let filteredDataset = dataset;
const filter = (key: keyof SyncDestinationModel, value: string) => {
return dataset.filter((model) => {
return filteredDataset.filter((model) => {
return model[key].toLowerCase().includes(value.toLowerCase());
});
};
@@ -68,4 +82,14 @@ export default class SyncSecretsDestinationsIndexRoute extends Route {
typeFilter: params.type,
});
}
resetController(controller: SyncSecretsDestinationsController, isExiting: boolean) {
if (isExiting) {
controller.setProperties({
page: undefined,
name: undefined,
type: undefined,
});
}
}
}
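
The filterData change fixes a subtle chaining bug: the inner helper previously filtered the original dataset on every call, so applying the name filter threw away the result of the type filter. Filtering the running result lets the two narrow each other. A standalone sketch of the corrected behavior:

// Sketch: each filter narrows the previous result rather than the full dataset.
function filterData(dataset, name, type) {
  let filtered = dataset;
  const byField = (key, value) =>
    filtered.filter((model) => model[key].toLowerCase().includes(value.toLowerCase()));
  if (type) filtered = byField('type', type);
  if (name) filtered = byField('name', name);
  return filtered;
}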

View File

@@ -30,9 +30,10 @@
"start:mirage": "start () { MIRAGE_DEV_HANDLER=$1 yarn run start; }; start",
"test": "npm-run-all --print-name lint:js:quiet lint:hbs:quiet && node scripts/start-vault.js",
"test:enos": "npm-run-all lint:js:quiet lint:hbs:quiet && node scripts/enos-test-ember.js",
"test:oss": "yarn run test -f='!enterprise'",
"test:quick": "node scripts/start-vault.js",
"test:quick-oss": "yarn test:quick -f='!enterprise'",
"test:oss": "yarn run test -f='!enterprise' --split=8 --preserve-test-name --parallel",
"test:quick": "node scripts/start-vault.js --split=8 --preserve-test-name --parallel",
"test:quick-oss": "yarn test:quick -f='!enterprise' --split=8 --preserve-test-name --parallel",
"test:filter": "node scripts/start-vault.js --server -f='!enterprise'",
"types:declare": "declare () { yarn tsc $1 --declaration --allowJs --emitDeclarationOnly --experimentalDecorators --outDir $2; }; declare",
"vault": "VAULT_REDIRECT_ADDR=http://127.0.0.1:8200 vault server -log-level=error -dev -dev-root-token-id=root -dev-ha -dev-transactional",
"vault:cluster": "VAULT_REDIRECT_ADDR=http://127.0.0.1:8202 vault server -log-level=error -dev -dev-root-token-id=root -dev-listen-address=127.0.0.1:8202 -dev-ha -dev-transactional"
@@ -145,6 +146,7 @@
"ember-d3": "^0.5.1",
"ember-data": "~4.11.3",
"ember-engines": "0.8.23",
"ember-exam": "^9.0.0",
"ember-fetch": "^8.1.2",
"ember-inflector": "4.0.2",
"ember-load-initializers": "^2.1.2",

View File

@@ -69,7 +69,9 @@ async function processLines(input, eachLine = () => {}) {
}
});
try {
await testHelper.run('ember', ['test', ...process.argv.slice(2)]);
// only the test:filter command specifies --server by default
const verb = process.argv[2] === '--server' ? 'test' : 'exam';
await testHelper.run('ember', [verb, ...process.argv.slice(2)]);
} catch (error) {
console.log(error);
process.exit(1);

View File

@@ -28,9 +28,10 @@ module.exports = {
},
proxies: {
'/v1': {
target: 'http://localhost:9200',
target: 'http://127.0.0.1:9200',
},
},
parallel: process.env.EMBER_EXAM_SPLIT_COUNT || 1,
};
if (process.env.CI) {

View File

@@ -10,6 +10,7 @@ import { setupApplicationTest } from 'ember-qunit';
import enginesPage from 'vault/tests/pages/secrets/backends';
import authPage from 'vault/tests/pages/auth';
import consoleClass from 'vault/tests/pages/components/console/ui-panel';
import { v4 as uuidv4 } from 'uuid';
const consoleComponent = create(consoleClass);
@@ -21,28 +22,38 @@ module('Acceptance | console', function (hooks) {
});
test("refresh reloads the current route's data", async function (assert) {
assert.expect(6);
await enginesPage.visit();
await settled();
const numEngines = enginesPage.rows.length;
await consoleComponent.toggle();
await settled();
for (const num of [1, 2, 3]) {
const inputString = `write sys/mounts/console-route-${num} type=kv`;
const ids = [uuidv4(), uuidv4(), uuidv4()];
for (const id of ids) {
const inputString = `write sys/mounts/console-route-${id} type=kv`;
await consoleComponent.runCommands(inputString);
await settled();
}
await consoleComponent.runCommands('refresh');
await settled();
assert.strictEqual(enginesPage.rows.length, numEngines + 3, 'new engines were added to the page');
for (const id of ids) {
assert.ok(
enginesPage.rows.findOneBy('path', `console-route-${id}/`),
'new engine is shown on the page'
);
}
// Clean up
for (const num of [1, 2, 3]) {
const inputString = `delete sys/mounts/console-route-${num}`;
for (const id of ids) {
const inputString = `delete sys/mounts/console-route-${id}`;
await consoleComponent.runCommands(inputString);
await settled();
}
await consoleComponent.runCommands('refresh');
await settled();
assert.strictEqual(enginesPage.rows.length, numEngines, 'engines were removed from the page');
for (const id of ids) {
assert.throws(() => {
enginesPage.rows.findOneBy('path', `console-route-${id}/`);
}, 'engine was removed');
}
});
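
The rewritten assertions avoid row counts, which become unstable once ember-exam runs several test files against the same Vault server in parallel. Instead the test generates uuid-scoped mount names and checks for those exact paths; the other acceptance suites in this change apply the same idea to policy and token names (for example `pki-admin-${this.mountPath}`). A condensed sketch of the pattern:

// Sketch: uuid-scoped names keep parallel test workers from touching each other's mounts.
const ids = [uuidv4(), uuidv4()];
for (const id of ids) {
  await consoleComponent.runCommands(`write sys/mounts/console-route-${id} type=kv`);
}
// Assert on the specific rows rather than a global count another worker could change.
for (const id of ids) {
  assert.ok(enginesPage.rows.findOneBy('path', `console-route-${id}/`), 'new engine is shown');
}
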
test('fullscreen command expands the cli panel', async function (assert) {

View File

@@ -75,7 +75,7 @@ module('Acceptance | oidc-config clients and assignments', function (hooks) {
});
test('it creates an assignment inline, creates a client, updates client to limit access, deletes client', async function (assert) {
assert.expect(22);
assert.expect(21);
//* clear out test state
await clearRecord(this.store, 'oidc/client', 'test-app');
@@ -197,11 +197,16 @@ module('Acceptance | oidc-config clients and assignments', function (hooks) {
assert.strictEqual(currentRouteName(), 'vault.cluster.access.oidc.clients.client.details');
await click(SELECTORS.clientDeleteButton);
await click(SELECTORS.confirmActionButton);
assert.strictEqual(
currentRouteName(),
'vault.cluster.access.oidc.index',
'redirects to call to action if only existing client is deleted'
);
//TODO this part of the test has a race condition
//because other tests could have created clients - there is no guarantee that this will be the last
//client in the list to redirect to the call to action
//assert.strictEqual(
//currentRouteName(),
//'vault.cluster.access.oidc.index',
//'redirects to call to action if only existing client is deleted'
//);
//* clean up test state
await clearRecord(this.store, 'oidc/assignment', 'assignment-inline');
});

View File

@@ -163,10 +163,11 @@ module('Acceptance | oidc provider', function (hooks) {
await authFormComponent.login();
await settled();
assert.strictEqual(currentURL(), url, 'URL is as expected after login');
assert.dom('[data-test-oidc-redirect]').exists('redirect text exists');
assert
.dom('[data-test-oidc-redirect]')
.hasTextContaining(`${callback}?code=`, 'Successful redirect to callback');
.hasTextContaining(`click here to go back to app`, 'Shows link back to app');
const link = document.querySelector('[data-test-oidc-redirect]').getAttribute('href');
assert.ok(link.includes('/callback?code='), 'Redirects to correct url');
//* clean up test state
await clearRecord(this.store, 'oidc/client', 'my-webapp');
@@ -191,7 +192,9 @@ module('Acceptance | oidc provider', function (hooks) {
await settled();
assert
.dom('[data-test-oidc-redirect]')
.hasTextContaining(`${callback}?code=`, 'Successful redirect to callback');
.hasTextContaining(`click here to go back to app`, 'Shows link back to app');
const link = document.querySelector('[data-test-oidc-redirect]').getAttribute('href');
assert.ok(link.includes('/callback?code='), 'Redirects to correct url');
//* clean up test state
await clearRecord(this.store, 'oidc/client', 'my-webapp');

View File

@@ -13,7 +13,7 @@ import enablePage from 'vault/tests/pages/settings/mount-secret-backend';
import { click, currentURL, fillIn, find, isSettled, visit } from '@ember/test-helpers';
import { SELECTORS } from 'vault/tests/helpers/pki/workflow';
import { adminPolicy, readerPolicy, updatePolicy } from 'vault/tests/helpers/policy-generator/pki';
import { tokenWithPolicy, runCommands } from 'vault/tests/helpers/pki/pki-run-commands';
import { tokenWithPolicy, runCommands, clearRecords } from 'vault/tests/helpers/pki/pki-run-commands';
import { unsupportedPem } from 'vault/tests/helpers/pki/values';
/**
@@ -25,12 +25,14 @@ module('Acceptance | pki workflow', function (hooks) {
setupApplicationTest(hooks);
hooks.beforeEach(async function () {
this.store = this.owner.lookup('service:store');
await authPage.login();
// Setup PKI engine
const mountPath = `pki-workflow-${uuidv4()}`;
await enablePage.enable('pki', mountPath);
this.mountPath = mountPath;
await logout.visit();
clearRecords(this.store);
});
hooks.afterEach(async function () {
@@ -40,40 +42,50 @@ module('Acceptance | pki workflow', function (hooks) {
await runCommands([`delete sys/mounts/${this.mountPath}`]);
});
test('empty state messages are correct when PKI not configured', async function (assert) {
assert.expect(21);
const assertEmptyState = (assert, resource) => {
assert.strictEqual(currentURL(), `/vault/secrets/${this.mountPath}/pki/${resource}`);
assert
.dom(SELECTORS.emptyStateTitle)
.hasText(
'PKI not configured',
`${resource} index renders correct empty state title when PKI not configured`
);
assert.dom(SELECTORS.emptyStateLink).hasText('Configure PKI');
assert
.dom(SELECTORS.emptyStateMessage)
.hasText(
`This PKI mount hasn't yet been configured with a certificate issuer.`,
`${resource} index empty state message correct when PKI not configured`
);
};
await authPage.login(this.pkiAdminToken);
await visit(`/vault/secrets/${this.mountPath}/pki/overview`);
assert.strictEqual(currentURL(), `/vault/secrets/${this.mountPath}/pki/overview`);
module('not configured', function (hooks) {
hooks.beforeEach(async function () {
await authPage.login();
const pki_admin_policy = adminPolicy(this.mountPath, 'roles');
this.pkiAdminToken = await tokenWithPolicy(`pki-admin-${this.mountPath}`, pki_admin_policy);
await logout.visit();
clearRecords(this.store);
});
await click(SELECTORS.rolesTab);
assertEmptyState(assert, 'roles');
test('empty state messages are correct when PKI not configured', async function (assert) {
assert.expect(21);
const assertEmptyState = (assert, resource) => {
assert.strictEqual(currentURL(), `/vault/secrets/${this.mountPath}/pki/${resource}`);
assert
.dom(SELECTORS.emptyStateTitle)
.hasText(
'PKI not configured',
`${resource} index renders correct empty state title when PKI not configured`
);
assert.dom(SELECTORS.emptyStateLink).hasText('Configure PKI');
assert
.dom(SELECTORS.emptyStateMessage)
.hasText(
`This PKI mount hasn't yet been configured with a certificate issuer.`,
`${resource} index empty state message correct when PKI not configured`
);
};
await authPage.login(this.pkiAdminToken);
await visit(`/vault/secrets/${this.mountPath}/pki/overview`);
assert.strictEqual(currentURL(), `/vault/secrets/${this.mountPath}/pki/overview`);
await click(SELECTORS.issuersTab);
assertEmptyState(assert, 'issuers');
await click(SELECTORS.rolesTab);
assertEmptyState(assert, 'roles');
await click(SELECTORS.certsTab);
assertEmptyState(assert, 'certificates');
await click(SELECTORS.keysTab);
assertEmptyState(assert, 'keys');
await click(SELECTORS.tidyTab);
assertEmptyState(assert, 'tidy');
await click(SELECTORS.issuersTab);
assertEmptyState(assert, 'issuers');
await click(SELECTORS.certsTab);
assertEmptyState(assert, 'certificates');
await click(SELECTORS.keysTab);
assertEmptyState(assert, 'keys');
await click(SELECTORS.tidyTab);
assertEmptyState(assert, 'tidy');
});
});
module('roles', function (hooks) {
@@ -91,10 +103,11 @@ module('Acceptance | pki workflow', function (hooks) {
const pki_admin_policy = adminPolicy(this.mountPath, 'roles');
const pki_reader_policy = readerPolicy(this.mountPath, 'roles');
const pki_editor_policy = updatePolicy(this.mountPath, 'roles');
this.pkiRoleReader = await tokenWithPolicy('pki-reader', pki_reader_policy);
this.pkiRoleEditor = await tokenWithPolicy('pki-editor', pki_editor_policy);
this.pkiAdminToken = await tokenWithPolicy('pki-admin', pki_admin_policy);
this.pkiRoleReader = await tokenWithPolicy(`pki-reader-${this.mountPath}`, pki_reader_policy);
this.pkiRoleEditor = await tokenWithPolicy(`pki-editor-${this.mountPath}`, pki_editor_policy);
this.pkiAdminToken = await tokenWithPolicy(`pki-admin-${this.mountPath}`, pki_admin_policy);
await logout.visit();
clearRecords(this.store);
});
test('shows correct items if user has all permissions', async function (assert) {
@@ -222,10 +235,11 @@ module('Acceptance | pki workflow', function (hooks) {
const pki_admin_policy = adminPolicy(this.mountPath);
const pki_reader_policy = readerPolicy(this.mountPath, 'keys', true);
const pki_editor_policy = updatePolicy(this.mountPath, 'keys');
this.pkiKeyReader = await tokenWithPolicy('pki-reader', pki_reader_policy);
this.pkiKeyEditor = await tokenWithPolicy('pki-editor', pki_editor_policy);
this.pkiAdminToken = await tokenWithPolicy('pki-admin', pki_admin_policy);
this.pkiKeyReader = await tokenWithPolicy(`pki-reader-${this.mountPath}`, pki_reader_policy);
this.pkiKeyEditor = await tokenWithPolicy(`pki-editor-${this.mountPath}`, pki_editor_policy);
this.pkiAdminToken = await tokenWithPolicy(`pki-admin-${this.mountPath}`, pki_admin_policy);
await logout.visit();
clearRecords(this.store);
});
test('shows correct items if user has all permissions', async function (assert) {
@@ -339,11 +353,14 @@ module('Acceptance | pki workflow', function (hooks) {
module('issuers', function (hooks) {
hooks.beforeEach(async function () {
await authPage.login();
const pki_admin_policy = adminPolicy(this.mountPath);
this.pkiAdminToken = await tokenWithPolicy(`pki-admin-${this.mountPath}`, pki_admin_policy);
// Configure engine with a default issuer
await runCommands([
`write ${this.mountPath}/root/generate/internal common_name="Hashicorp Test" name="Hashicorp Test"`,
]);
await logout.visit();
clearRecords(this.store);
});
test('lists the correct issuer metadata info', async function (assert) {
assert.expect(6);
@@ -373,7 +390,10 @@ module('Acceptance | pki workflow', function (hooks) {
capabilities = ["deny"]
}
`;
this.token = await tokenWithPolicy('pki-issuer-denied-policy', pki_issuer_denied_policy);
this.token = await tokenWithPolicy(
`pki-issuer-denied-policy-${this.mountPath}`,
pki_issuer_denied_policy
);
await logout.visit();
await authPage.login(this.token);
await visit(`/vault/secrets/${this.mountPath}/pki/overview`);
@@ -487,7 +507,10 @@ module('Acceptance | pki workflow', function (hooks) {
${adminPolicy(this.mountPath)}
${readerPolicy(this.mountPath, 'config/cluster')}
`;
this.mixedConfigCapabilities = await tokenWithPolicy('pki-reader', mixed_config_policy);
this.mixedConfigCapabilities = await tokenWithPolicy(
`pki-reader-${this.mountPath}`,
mixed_config_policy
);
await logout.visit();
});

View File

@@ -5,20 +5,23 @@
import { module, test } from 'qunit';
import { setupApplicationTest } from 'ember-qunit';
import { v4 as uuidv4 } from 'uuid';
import authPage from 'vault/tests/pages/auth';
import logout from 'vault/tests/pages/logout';
import enablePage from 'vault/tests/pages/settings/mount-secret-backend';
import { click, currentURL, currentRouteName, visit } from '@ember/test-helpers';
import { SELECTORS } from 'vault/tests/helpers/pki/overview';
import { tokenWithPolicy, runCommands } from 'vault/tests/helpers/pki/pki-run-commands';
import { tokenWithPolicy, runCommands, clearRecords } from 'vault/tests/helpers/pki/pki-run-commands';
module('Acceptance | pki overview', function (hooks) {
setupApplicationTest(hooks);
hooks.beforeEach(async function () {
this.store = this.owner.lookup('service:store');
await authPage.login();
// Setup PKI engine
const mountPath = `pki`;
const mountPath = `pki-${uuidv4()}`;
await enablePage.enable('pki', mountPath);
this.mountPath = mountPath;
await runCommands([`write ${this.mountPath}/root/generate/internal common_name="Hashicorp Test"`]);
@@ -42,6 +45,7 @@ module('Acceptance | pki overview', function (hooks) {
this.pkiIssuersList = await tokenWithPolicy('pki-issuers-list', pki_issuers_list_policy);
this.pkiAdminToken = await tokenWithPolicy('pki-admin', pki_admin_policy);
await logout.visit();
clearRecords(this.store);
});
hooks.afterEach(async function () {

View File

@@ -45,6 +45,7 @@ module('Acceptance | policies (old)', function (hooks) {
assert.dom('[data-test-policy-name]').hasText(policyLower, 'displays the policy name on the show page');
assert.dom('[data-test-flash-message].is-info').doesNotExist('no flash message is displayed on save');
await click('[data-test-policy-list-link] a');
await fillIn('[data-test-component="navigate-input"]', policyLower);
assert
.dom(`[data-test-policy-link="${policyLower}"]`)
.exists({ count: 1 }, 'new policy shown in the list');
@@ -63,6 +64,7 @@ module('Acceptance | policies (old)', function (hooks) {
`/vault/policies/acl`,
'navigates to policy list on successful deletion'
);
await fillIn('[data-test-component="navigate-input"]', policyLower);
assert
.dom(`[data-test-policy-item="${policyLower}"]`)
.doesNotExist('deleted policy is not shown in the list');

View File

@@ -38,7 +38,7 @@ module('Acceptance | kv-v2 workflow | secret and version create', function (hook
module('admin persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(tokenWithPolicyCmd('admin', personas.admin(this.backend)));
const token = await runCmd(tokenWithPolicyCmd(`admin-${this.backend}`, personas.admin(this.backend)));
await authPage.login(token);
clearRecords(this.store);
return;
@@ -282,7 +282,9 @@ module('Acceptance | kv-v2 workflow | secret and version create', function (hook
module('data-reader persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(tokenWithPolicyCmd('data-reader', personas.dataReader(this.backend)));
const token = await runCmd(
tokenWithPolicyCmd(`data-reader-${this.backend}`, personas.dataReader(this.backend))
);
await authPage.login(token);
clearRecords(this.store);
return;
@@ -423,7 +425,7 @@ module('Acceptance | kv-v2 workflow | secret and version create', function (hook
module('data-list-reader persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(
tokenWithPolicyCmd('data-list-reader', personas.dataListReader(this.backend))
tokenWithPolicyCmd(`data-list-reader-${this.backend}`, personas.dataListReader(this.backend))
);
await authPage.login(token);
clearRecords(this.store);
@@ -568,7 +570,7 @@ module('Acceptance | kv-v2 workflow | secret and version create', function (hook
module('metadata-maintainer persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(
tokenWithPolicyCmd('data-list-reader', personas.metadataMaintainer(this.backend))
tokenWithPolicyCmd(`data-list-reader-${this.backend}`, personas.metadataMaintainer(this.backend))
);
await authPage.login(token);
clearRecords(this.store);
@@ -764,7 +766,9 @@ module('Acceptance | kv-v2 workflow | secret and version create', function (hook
module('secret-creator persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(tokenWithPolicyCmd('secret-creator', personas.secretCreator(this.backend)));
const token = await runCmd(
tokenWithPolicyCmd(`secret-creator-${this.backend}`, personas.secretCreator(this.backend))
);
await authPage.login(token);
clearRecords(this.store);
return;
@@ -1006,7 +1010,10 @@ module('Acceptance | kv-v2 workflow | secret and version create', function (hook
module('secret-nested-creator persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(
tokenWithPolicyCmd('secret-nested-creator', personas.secretNestedCreator(this.backend))
tokenWithPolicyCmd(
`secret-nested-creator-${this.backend}`,
personas.secretNestedCreator(this.backend)
)
);
await authPage.login(token);
clearRecords(this.store);
@@ -1048,7 +1055,11 @@ path "${this.backend}/metadata/*" {
capabilities = ["list", "read"]
}
`;
const { userToken } = await setupControlGroup({ userPolicy });
const { userToken } = await setupControlGroup({
userPolicy,
backend: this.backend,
});
this.userToken = userToken;
await authPage.login(userToken);
clearRecords(this.store);
@@ -1089,11 +1100,12 @@ path "${this.backend}/metadata/*" {
'shows control group error'
);
await grantAccessForWrite({
accessor: tokenToUnwrap.accessor,
token: tokenToUnwrap.token,
accessor: tokenToUnwrap.accessor,
creation_path: `${backend}/data/${secretPath}`,
originUrl: `/vault/secrets/${backend}/kv/create`,
userToken: this.userToken,
backend: this.backend,
});
// In a real scenario the user would stay on page, but in the test
// we fill in the same info and try again
@@ -1161,6 +1173,7 @@ path "${this.backend}/metadata/*" {
creation_path: `${backend}/data/${secretPath}`,
originUrl: `/vault/secrets/${backend}/kv/${secretPath}/details/edit`,
userToken: this.userToken,
backend: this.backend,
});
// Remark for unwrap as if we never left the page.
this.controlGroup.markTokenForUnwrap(tokenToUnwrap.accessor);

View File

@@ -28,6 +28,11 @@ const assertDeleteActions = (assert, expected = ['delete', 'destroy']) => {
}
});
};
const makeToken = (name, mountPath, policyGenerator) => {
return tokenWithPolicyCmd(`${name}-${mountPath}`, policyGenerator(mountPath));
};
/**
* This test set is for testing delete, undelete, destroy flows
* Letter(s) in parenthesis at the end are shorthand for the persona,
@@ -58,7 +63,7 @@ module('Acceptance | kv-v2 workflow | delete, undelete, destroy', function (hook
module('admin persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(tokenWithPolicyCmd('admin', personas.admin(this.backend)));
const token = await runCmd(makeToken('admin', this.backend, personas.admin));
await authPage.login(token);
clearRecords(this.store);
return;
@@ -159,7 +164,7 @@ module('Acceptance | kv-v2 workflow | delete, undelete, destroy', function (hook
module('data-reader persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(tokenWithPolicyCmd('data-reader', personas.dataReader(this.backend)));
const token = await runCmd(makeToken('data-reader', this.backend, personas.dataReader));
await authPage.login(token);
clearRecords(this.store);
return;
@@ -206,9 +211,7 @@ module('Acceptance | kv-v2 workflow | delete, undelete, destroy', function (hook
module('data-list-reader persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(
tokenWithPolicyCmd('data-list-reader', personas.dataListReader(this.backend))
);
const token = await runCmd(makeToken('data-list-reader', this.backend, personas.dataListReader));
await authPage.login(token);
clearRecords(this.store);
return;
@@ -266,9 +269,7 @@ module('Acceptance | kv-v2 workflow | delete, undelete, destroy', function (hook
module('metadata-maintainer persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(
tokenWithPolicyCmd('metadata-maintainer', personas.metadataMaintainer(this.backend))
);
const token = await runCmd(makeToken('metadata-maintainer', this.backend, personas.metadataMaintainer));
await authPage.login(token);
clearRecords(this.store);
return;
@@ -358,7 +359,7 @@ module('Acceptance | kv-v2 workflow | delete, undelete, destroy', function (hook
module('secret-nested-creator persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(
tokenWithPolicyCmd('secret-nested-creator', personas.secretNestedCreator(this.backend))
makeToken('secret-nested-creator', this.backend, personas.secretNestedCreator)
);
await authPage.login(token);
clearRecords(this.store);
@@ -384,7 +385,7 @@ module('Acceptance | kv-v2 workflow | delete, undelete, destroy', function (hook
module('secret-creator persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(tokenWithPolicyCmd('secret-creator', personas.secretCreator(this.backend)));
const token = await runCmd(makeToken('secret-creator', this.backend, personas.secretCreator));
await authPage.login(token);
clearRecords(this.store);
return;
@@ -468,7 +469,7 @@ path "sys/control-group/request" {
}
`;
const { userToken } = await setupControlGroup({ userPolicy });
const { userToken } = await setupControlGroup({ userPolicy, backend: this.backend });
this.userToken = userToken;
await authPage.login(userToken);
clearRecords(this.store);

View File

@@ -59,11 +59,11 @@ module('Acceptance | kv-v2 workflow | edge cases', function (hooks) {
const backend = this.backend;
const token = await runCmd([
createPolicyCmd(
'nested-secret-list-reader',
`nested-secret-list-reader-${this.backend}`,
metadataPolicy({ backend, secretPath, capabilities }) +
dataPolicy({ backend, secretPath, capabilities })
),
createTokenCmd('nested-secret-list-reader'),
createTokenCmd(`nested-secret-list-reader-${this.backend}`),
]);
await authPage.login(token);
});
@@ -191,14 +191,14 @@ module('Acceptance | kv-v2 workflow | edge cases', function (hooks) {
// user has different permissions for each secret path
const token = await runCmd([
createPolicyCmd(
'destruction-no-read',
`destruction-no-read-${this.backend}`,
dataPolicy({ backend, secretPath: 'data-delete-only', capabilities: ['delete'] }) +
deleteVersionsPolicy({ backend, secretPath: 'delete-version-only' }) +
destroyVersionsPolicy({ backend, secretPath: 'destroy-version-only' }) +
metadataPolicy({ backend, secretPath: 'destroy-metadata-only', capabilities: ['delete'] }) +
metadataListPolicy(backend)
),
createTokenCmd('destruction-no-read'),
createTokenCmd(`destruction-no-read-${this.backend}`),
]);
for (const secret of testSecrets) {
await writeVersionedSecret(backend, secret, 'foo', 'bar', 2);

View File

@@ -360,10 +360,10 @@ module('Acceptance | kv-v2 workflow | navigation', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd([
createPolicyCmd(
'data-reader',
`data-reader-${this.backend}`,
personas.dataReader(this.backend) + personas.dataReader(this.emptyBackend)
),
createTokenCmd('data-reader'),
createTokenCmd(`data-reader-${this.backend}`),
]);
await authPage.login(token);
clearRecords(this.store);
@@ -540,10 +540,10 @@ module('Acceptance | kv-v2 workflow | navigation', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd([
createPolicyCmd(
'data-reader-list',
`data-reader-list-${this.backend}`,
personas.dataListReader(this.backend) + personas.dataListReader(this.emptyBackend)
),
createTokenCmd('data-reader-list'),
createTokenCmd(`data-reader-list-${this.backend}`),
]);
await authPage.login(token);
@@ -727,10 +727,10 @@ module('Acceptance | kv-v2 workflow | navigation', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd([
createPolicyCmd(
'metadata-maintainer',
`metadata-maintainer-${this.backend}`,
personas.metadataMaintainer(this.backend) + personas.metadataMaintainer(this.emptyBackend)
),
createTokenCmd('metadata-maintainer'),
createTokenCmd(`metadata-maintainer-${this.backend}`),
]);
await authPage.login(token);
clearRecords(this.store);
@@ -945,10 +945,10 @@ module('Acceptance | kv-v2 workflow | navigation', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd([
createPolicyCmd(
'secret-creator',
`secret-creator-${this.backend}`,
personas.secretCreator(this.backend) + personas.secretCreator(this.emptyBackend)
),
createTokenCmd('secret-creator'),
createTokenCmd(`secret-creator-${this.backend}`),
]);
await authPage.login(token);
clearRecords(this.store);
@@ -1163,7 +1163,7 @@ path "${this.backend}/*" {
capabilities = ["list"]
}
`;
const { userToken } = await setupControlGroup({ userPolicy });
const { userToken } = await setupControlGroup({ userPolicy, backend: this.backend });
this.userToken = userToken;
await authPage.login(userToken);
clearRecords(this.store);
@@ -1204,6 +1204,7 @@ path "${this.backend}/*" {
apiPath: `${backend}/data/app/nested/secret`,
originUrl: `/vault/secrets/${backend}/kv/list/app/nested/`,
userToken: this.userToken,
backend: this.backend,
});
assert.strictEqual(
currentURL(),
@@ -1254,11 +1255,11 @@ path "${this.backend}/*" {
await waitUntil(() => currentRouteName() === 'vault.cluster.access.control-group-accessor'),
'redirects to access control group route'
);
await grantAccess({
apiPath: `${backend}/data/${encodeURIComponent(secretPath)}`,
originUrl: `/vault/secrets/${backend}/kv/list`,
userToken: this.userToken,
backend: this.backend,
});
assert.strictEqual(

View File

@@ -20,6 +20,9 @@ import { PAGE } from 'vault/tests/helpers/kv/kv-selectors';
import { click, currentRouteName, currentURL, visit, waitUntil } from '@ember/test-helpers';
import { grantAccess, setupControlGroup } from 'vault/tests/helpers/control-groups';
const makeToken = (name, mountPath, policyGenerator) => {
return tokenWithPolicyCmd(`${name}-${mountPath}`, policyGenerator(mountPath));
};
/**
* This test set is for testing version history & path pages for secret.
* Letter(s) in parenthesis at the end are shorthand for the persona,
@@ -52,7 +55,7 @@ module('Acceptance | kv-v2 workflow | version history, paths', function (hooks)
module('admin persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(tokenWithPolicyCmd('admin', personas.admin(this.backend)));
const token = await runCmd(makeToken('admin', this.backend, personas.admin));
await authPage.login(token);
clearRecords(this.store);
});
@@ -101,7 +104,7 @@ module('Acceptance | kv-v2 workflow | version history, paths', function (hooks)
module('data-reader persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(tokenWithPolicyCmd('data-reader', personas.dataReader(this.backend)));
const token = await runCmd(makeToken('data-reader', this.backend, personas.dataReader));
await authPage.login(token);
clearRecords(this.store);
});
@@ -128,9 +131,7 @@ module('Acceptance | kv-v2 workflow | version history, paths', function (hooks)
module('data-list-reader persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(
tokenWithPolicyCmd('data-list-reader', personas.dataListReader(this.backend))
);
const token = await runCmd(makeToken('data-list-reader', this.backend, personas.dataListReader));
await authPage.login(token);
clearRecords(this.store);
});
@@ -157,9 +158,7 @@ module('Acceptance | kv-v2 workflow | version history, paths', function (hooks)
module('metadata-maintainer persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(
tokenWithPolicyCmd('metadata-maintainer', personas.metadataMaintainer(this.backend))
);
const token = await runCmd(makeToken('metadata-maintainer', this.backend, personas.metadataMaintainer));
await authPage.login(token);
clearRecords(this.store);
});
@@ -208,7 +207,7 @@ module('Acceptance | kv-v2 workflow | version history, paths', function (hooks)
module('secret-creator persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(tokenWithPolicyCmd('secret-creator', personas.secretCreator(this.backend)));
const token = await runCmd(makeToken('secret-creator', this.backend, personas.secretCreator));
await authPage.login(token);
clearRecords(this.store);
});
@@ -254,7 +253,7 @@ path "${this.backend}/*" {
capabilities = ["list"]
}
`;
const { userToken } = await setupControlGroup({ userPolicy });
const { userToken } = await setupControlGroup({ userPolicy, backend: this.backend });
this.userToken = userToken;
await authPage.login(userToken);
clearRecords(this.store);
@@ -270,6 +269,7 @@ path "${this.backend}/*" {
apiPath: `${this.backend}/metadata/${this.secretPath}`,
originUrl: `/vault/secrets/${this.urlPath}/details`,
userToken: this.userToken,
backend: this.backend,
});
assert.strictEqual(
currentURL(),

View File

@@ -9,7 +9,7 @@ import { setupMirage } from 'ember-cli-mirage/test-support';
import syncScenario from 'vault/mirage/scenarios/sync';
import syncHandlers from 'vault/mirage/handlers/sync';
import authPage from 'vault/tests/pages/auth';
import { click, visit } from '@ember/test-helpers';
import { click, visit, fillIn } from '@ember/test-helpers';
import { PAGE } from 'vault/tests/helpers/sync/sync-selectors';
const { searchSelect, filter, listItem } = PAGE;
@@ -29,6 +29,11 @@ module('Acceptance | sync | destinations', function (hooks) {
assert.dom(listItem).exists({ count: 6 }, 'All destinations render');
await click(`${filter('type')} .ember-basic-dropdown-trigger`);
await click(searchSelect.option());
assert.dom(listItem).exists({ count: 2 }, 'Filtered destinations render');
assert.dom(listItem).exists({ count: 2 }, 'Destinations are filtered by type');
await fillIn(filter('name'), 'new');
assert.dom(listItem).exists({ count: 1 }, 'Destinations are filtered by type and name');
await click(searchSelect.removeSelected);
await fillIn(filter('name'), 'gcp');
assert.dom(listItem).exists({ count: 1 }, 'Destinations are filtered by name');
});
});

View File

@@ -12,7 +12,7 @@ import { pollCluster } from 'vault/tests/helpers/poll-cluster';
const { unsealKeys } = VAULT_KEYS;
module('Acceptance | unseal', function (hooks) {
module.skip('Acceptance | unseal', function (hooks) {
setupApplicationTest(hooks);
hooks.beforeEach(function () {

View File

@@ -5,8 +5,8 @@
import { click, visit } from '@ember/test-helpers';
import { create } from 'ember-cli-page-object';
import { CONTROL_GROUP_PREFIX, TOKEN_SEPARATOR } from 'vault/services/control-group';
import authPage from 'vault/tests/pages/auth';
import controlGroup from 'vault/tests/pages/components/control-group';
import { createPolicyCmd, createTokenCmd, mountAuthCmd, runCmd } from './commands';
@@ -16,13 +16,22 @@ const storageKey = (accessor, path) => {
return `${CONTROL_GROUP_PREFIX}${accessor}${TOKEN_SEPARATOR}${path}`;
};
// This function is used to set up a control group for testing
// It will create a userpass backend, create an authorizing user,
// and create a controlled access token. The auth mount and policy
// names are suffixed with the backend name
export const setupControlGroup = async ({
userPolicy,
backend,
adminUser = 'authorizer',
adminPassword = 'password',
userpassMount = 'userpass',
adminPassword = 'testing-xyz',
}) => {
const userPolicyName = 'kv-control-group';
if (!backend || !userPolicy) {
throw new Error('missing required fields for setupControlGroup');
}
const userpassMount = `userpass-${backend}`;
const userPolicyName = `kv-control-group-${backend}`;
const authorizerPolicyName = `authorizer-${backend}`;
const authorizerPolicy = `
path "sys/control-group/authorize" {
capabilities = ["update"]
@@ -35,7 +44,7 @@ export const setupControlGroup = async ({
const userpassAccessor = await runCmd([
// write policies for control group + authorization
createPolicyCmd(userPolicyName, userPolicy),
createPolicyCmd('authorizer', authorizerPolicy),
createPolicyCmd(authorizerPolicyName, authorizerPolicy),
// enable userpass, create admin user
mountAuthCmd('userpass', userpassMount),
// read out mount to get the accessor
@@ -50,14 +59,13 @@ export const setupControlGroup = async ({
const userToken = await runCmd([
// create alias for authorizer and add them to the managers group
`write identity/alias mount_accessor=${userpassAccessor} entity_id=${authorizerEntityId} name=${adminUser}`,
`write identity/group name=managers member_entity_ids=${authorizerEntityId} policies=authorizer`,
`write identity/group name=managers member_entity_ids=${authorizerEntityId} policies=${authorizerPolicyName}`,
// create a token to request access to kv/foo
createTokenCmd(userPolicyName),
]);
return {
userToken,
userPolicyName,
userPolicy,
adminUser,
adminPassword,
userpassMount,
@@ -70,10 +78,15 @@ export async function grantAccessForWrite({
creation_path,
originUrl,
userToken,
backend,
authorizerUser = 'authorizer',
authorizerPassword = 'password',
authorizerPassword = 'testing-xyz',
}) {
await authPage.loginUsername(authorizerUser, authorizerPassword);
if (!token || !accessor || !creation_path || !originUrl || !userToken || !backend) {
throw new Error('missing required fields for grantAccessForWrite');
}
const userpassMount = `userpass-${backend}`;
await authPage.loginUsername(authorizerUser, authorizerPassword, userpassMount);
await visit(`/vault/access/control-groups/${accessor}`);
await controlGroupComponent.authorize();
await authPage.login(userToken);
@@ -91,21 +104,26 @@ export async function grantAccessForWrite({
await visit(originUrl);
}
/*
* Control group grant access flow
* Assumes start on route 'vault.cluster.access.control-group-accessor'
* and authorizer login is via userpass
*/
export async function grantAccess({
apiPath,
originUrl,
userToken,
backend,
authorizerUser = 'authorizer',
authorizerPassword = 'password',
authorizerPassword = 'testing-xyz',
}) {
/*
* Control group grant access flow
* Assumes start on route 'vault.cluster.access.control-group-accessor'
* and authorizer login is via userpass
*/
if (!apiPath || !originUrl || !userToken || !backend) {
throw new Error('missing required fields for grantAccess');
}
const userpassMount = `userpass-${backend}`;
const accessor = controlGroupComponent.accessor;
const controlGroupToken = controlGroupComponent.token;
await authPage.loginUsername(authorizerUser, authorizerPassword);
await authPage.loginUsername(authorizerUser, authorizerPassword, userpassMount);
await visit(`/vault/access/control-groups/${accessor}`);
await controlGroupComponent.authorize();
await authPage.login(userToken);
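
Taken together, these helpers let an acceptance test stand up a control group scoped to its own backend and then authorize the request it intercepts. A hedged usage sketch (the secret path is hypothetical; userPolicy is whatever policy the test exercises):

// Sketch: per-backend control group setup, then authorize the intercepted request.
const { userToken } = await setupControlGroup({ userPolicy, backend });
await authPage.login(userToken);
// ...perform an action the control group blocks, landing on the accessor route...
await grantAccess({
  apiPath: `${backend}/data/my-secret`, // hypothetical guarded path
  originUrl: `/vault/secrets/${backend}/kv/list`,
  userToken,
  backend,
});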

View File

@@ -14,6 +14,7 @@ export const SELECTORS = {
icon: (name) => `[data-test-icon="${name}"]`,
tab: (name) => `[data-test-tab="${name}"]`,
filter: (name) => `[data-test-filter="${name}"]`,
filterInput: '[data-test-filter-input]',
confirmModalInput: '[data-test-confirmation-modal-input]',
confirmButton: '[data-test-confirm-button]',
emptyStateTitle: '[data-test-empty-state-title]',

View File

@@ -34,3 +34,21 @@ export const runCommands = async function (commands) {
throw error;
}
};
// Clears pki-related data and capabilities so that admin
// capabilities from setup don't carry over
export function clearRecords(store) {
store.unloadAll('pki/action');
store.unloadAll('pki/issuer');
store.unloadAll('pki/key');
store.unloadAll('pki/role');
store.unloadAll('pki/sign-intermediate');
store.unloadAll('pki/tidy');
store.unloadAll('pki/config/urls');
store.unloadAll('pki/config/crl');
store.unloadAll('pki/config/cluster');
store.unloadAll('pki/config/acme');
store.unloadAll('pki/certificate/generate');
store.unloadAll('pki/certificate/sign');
store.unloadAll('capabilities');
}
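
clearRecords is meant to run in each module's beforeEach after the setup token has logged out, so capability records cached while the admin token did the mounting never bleed into assertions made for a lower-privileged persona. A sketch of the hook shape the suites above use:

hooks.beforeEach(async function () {
  this.store = this.owner.lookup('service:store');
  await authPage.login(); // admin-level setup (mounts, policies) happens here
  this.mountPath = `pki-workflow-${uuidv4()}`;
  await enablePage.enable('pki', this.mountPath);
  await logout.visit();
  clearRecords(this.store); // drop cached pki/* and capabilities records before persona logins
});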

View File

@@ -197,10 +197,12 @@ module('Integration | Component | InfoTableRow', function (hooks) {
});
test('Truncates the label if too long', async function (assert) {
this.set('label', 'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz');
await render(hbs`<InfoTableRow
await render(hbs`<div style="width: 100px;">
<InfoTableRow
@label={{this.label}}
@value={{this.value}}
/>`);
/>
</div>`);
assert.dom('[data-test-component="info-table-row"]').exists('Row renders');
assert.dom('[data-test-label-div].label-overflow').exists('Label has class label-overflow');
await triggerEvent('[data-test-row-label]', 'mouseenter');

View File

@@ -240,8 +240,8 @@ module('Integration | Component | kv-v2 | Page::Secret::Details', function (hook
.exists('renders current version icon');
});
test('it renders sync status page alert', async function (assert) {
assert.expect(5); // assert count important because confirms request made to fetch sync status twice
test('it renders sync status page alert and refreshes', async function (assert) {
assert.expect(6); // assert count important because confirms request made to fetch sync status twice
const destinationName = 'my-destination';
this.server.create('sync-association', {
type: 'aws-sm',
@@ -250,7 +250,7 @@ module('Integration | Component | kv-v2 | Page::Secret::Details', function (hook
secret_name: this.path,
});
this.server.get(`sys/sync/associations/destinations`, (schema, req) => {
// this assertion should be hit twice, once on init and again when the 'Refresh' button is clicked
// these assertions should be hit twice, once on init and again when the 'Refresh' button is clicked
assert.ok(true, 'request made to fetch sync status');
assert.propEqual(
req.queryParams,
@@ -281,8 +281,56 @@ module('Integration | Component | kv-v2 | Page::Secret::Details', function (hook
'Synced my-destination - last updated September',
'renders sync status alert banner'
);
assert
.dom(PAGE.detail.syncAlert())
.hasTextContaining(
'This secret has been synced from Vault to 1 destination. Updates to this secret will automatically sync to its destination.',
'renders alert header referring to singular destination'
);
// sync status refresh button
await click(`${PAGE.detail.syncAlert()} button`);
});
test('it renders sync status page alert for multiple destinations', async function (assert) {
assert.expect(3); // assert count important because confirms request made to fetch sync status twice
this.server.create('sync-association', {
type: 'aws-sm',
name: 'aws-dest',
mount: this.backend,
secret_name: this.path,
});
this.server.create('sync-association', {
type: 'gh',
name: 'gh-dest',
mount: this.backend,
secret_name: this.path,
});
this.server.get(`sys/sync/associations/destinations`, (schema, req) => {
return syncStatusResponse(schema, req);
});
await render(
hbs`
<Page::Secret::Details
@path={{this.model.path}}
@secret={{this.model.secret}}
@metadata={{this.model.metadata}}
@breadcrumbs={{this.breadcrumbs}}
/>
`,
{ owner: this.engine }
);
assert
.dom(PAGE.detail.syncAlert('aws-dest'))
.hasTextContaining('Synced aws-dest - last updated September', 'renders status for aws destination');
assert
.dom(PAGE.detail.syncAlert('gh-dest'))
.hasTextContaining('Syncing gh-dest - last updated September', 'renders status for gh destination');
assert
.dom(PAGE.detail.syncAlert())
.hasTextContaining(
'This secret has been synced from Vault to 2 destinations. Updates to this secret will automatically sync to its destinations.',
'renders alert title referring to plural destinations'
);
});
});

View File

@@ -7,7 +7,7 @@ import { module, test } from 'qunit';
import { setupRenderingTest } from 'ember-qunit';
import { setupEngine } from 'ember-engines/test-support';
import { setupMirage } from 'ember-cli-mirage/test-support';
import { render, click } from '@ember/test-helpers';
import { render, click, fillIn } from '@ember/test-helpers';
import hbs from 'htmlbars-inline-precompile';
import { allowAllCapabilitiesStub } from 'vault/tests/helpers/stubs';
import sinon from 'sinon';
@@ -97,8 +97,7 @@ module('Integration | Component | sync | Page::Destinations', function (hooks) {
);
// NAME FILTER
await click(`${filter('name')} .ember-basic-dropdown-trigger`);
await click(searchSelect.option(searchSelect.optionIndex('destination-aws')));
await fillIn(filter('name'), 'destination-aws');
assert.deepEqual(
this.transitionStub.lastCall.args,
['vault.cluster.sync.secrets.destinations', { queryParams: { name: 'destination-aws' } }],

View File

@@ -256,11 +256,11 @@ module('Integration | Component | sync | Secrets::Page::Destinations::CreateAndE
assert.dom(PAGE.title).hasTextContaining(`Edit ${this.model.name}`);
for (const attr of this.model.formFields) {
// Enable inputs with sensitive values
if (maskedParams.includes(attr.name)) {
await click(PAGE.form.enableInput(attr.name));
}
if (editable.includes(attr.name)) {
if (maskedParams.includes(attr.name)) {
// Enable inputs with sensitive values
await click(PAGE.form.enableInput(attr.name));
}
await PAGE.form.fillInByAttr(attr.name, `new-${decamelize(attr.name)}-value`);
} else {
assert.dom(PAGE.inputByAttr(attr.name)).isDisabled(`${attr.name} is disabled`);

View File

@@ -105,7 +105,7 @@ module('Integration | Component | sync | Page::Overview', function (hooks) {
await click(actionToggle(0));
assert.dom(action('sync')).hasText('Sync secrets', 'Sync action renders');
assert.dom(action('details')).hasText('Details', 'Details action renders');
assert.dom(action('details')).hasText('View synced secrets', 'View synced secrets action renders');
});
test('it should paginate secrets by destination table', async function (assert) {

View File

@@ -8,7 +8,7 @@ import config from 'vault/config/environment';
import * as QUnit from 'qunit';
import { setApplication } from '@ember/test-helpers';
import { setup } from 'qunit-dom';
import { start } from 'ember-qunit';
import start from 'ember-exam/test-support/start';
import './helpers/flash-message';
import preloadAssets from 'ember-asset-loader/test-support/preload-assets';
import { setupGlobalA11yHooks, setRunOptions } from 'ember-a11y-testing/test-support';

View File

@@ -67,7 +67,7 @@ __metadata:
languageName: node
linkType: hard
"@babel/code-frame@npm:^7.22.13":
"@babel/code-frame@npm:^7.22.13, @babel/code-frame@npm:^7.23.5":
version: 7.23.5
resolution: "@babel/code-frame@npm:7.23.5"
dependencies:
@@ -266,6 +266,29 @@ __metadata:
languageName: node
linkType: hard
"@babel/core@npm:^7.23.6":
version: 7.23.7
resolution: "@babel/core@npm:7.23.7"
dependencies:
"@ampproject/remapping": ^2.2.0
"@babel/code-frame": ^7.23.5
"@babel/generator": ^7.23.6
"@babel/helper-compilation-targets": ^7.23.6
"@babel/helper-module-transforms": ^7.23.3
"@babel/helpers": ^7.23.7
"@babel/parser": ^7.23.6
"@babel/template": ^7.22.15
"@babel/traverse": ^7.23.7
"@babel/types": ^7.23.6
convert-source-map: ^2.0.0
debug: ^4.1.0
gensync: ^1.0.0-beta.2
json5: ^2.2.3
semver: ^6.3.1
checksum: 32d5bf73372a47429afaae9adb0af39e47bcea6a831c4b5dcbb4791380cda6949cb8cb1a2fea8b60bb1ebe189209c80e333903df1fa8e9dcb04798c0ce5bf59e
languageName: node
linkType: hard
"@babel/eslint-parser@npm:^7.21.3":
version: 7.22.9
resolution: "@babel/eslint-parser@npm:7.22.9"
@@ -336,6 +359,18 @@ __metadata:
languageName: node
linkType: hard
"@babel/generator@npm:^7.23.6":
version: 7.23.6
resolution: "@babel/generator@npm:7.23.6"
dependencies:
"@babel/types": ^7.23.6
"@jridgewell/gen-mapping": ^0.3.2
"@jridgewell/trace-mapping": ^0.3.17
jsesc: ^2.5.1
checksum: 1a1a1c4eac210f174cd108d479464d053930a812798e09fee069377de39a893422df5b5b146199ead7239ae6d3a04697b45fc9ac6e38e0f6b76374390f91fc6c
languageName: node
linkType: hard
"@babel/helper-annotate-as-pure@npm:^7.12.13, @babel/helper-annotate-as-pure@npm:^7.16.0":
version: 7.16.0
resolution: "@babel/helper-annotate-as-pure@npm:7.16.0"
@@ -485,6 +520,19 @@ __metadata:
languageName: node
linkType: hard
"@babel/helper-compilation-targets@npm:^7.23.6":
version: 7.23.6
resolution: "@babel/helper-compilation-targets@npm:7.23.6"
dependencies:
"@babel/compat-data": ^7.23.5
"@babel/helper-validator-option": ^7.23.5
browserslist: ^4.22.2
lru-cache: ^5.1.1
semver: ^6.3.1
checksum: c630b98d4527ac8fe2c58d9a06e785dfb2b73ec71b7c4f2ddf90f814b5f75b547f3c015f110a010fd31f76e3864daaf09f3adcd2f6acdbfb18a8de3a48717590
languageName: node
linkType: hard
"@babel/helper-create-class-features-plugin@npm:^7.13.0, @babel/helper-create-class-features-plugin@npm:^7.16.0, @babel/helper-create-class-features-plugin@npm:^7.8.3":
version: 7.16.0
resolution: "@babel/helper-create-class-features-plugin@npm:7.16.0"
@@ -1574,6 +1622,17 @@ __metadata:
languageName: node
linkType: hard
"@babel/helpers@npm:^7.23.7":
version: 7.23.7
resolution: "@babel/helpers@npm:7.23.7"
dependencies:
"@babel/template": ^7.22.15
"@babel/traverse": ^7.23.7
"@babel/types": ^7.23.6
checksum: 4f3bdf35fb54ff79107c6020ba1e36a38213a15b05ca0fa06c553b65f566e185fba6339fb3344be04593ebc244ed0bbb0c6087e73effe0d053a30bcd2db3a013
languageName: node
linkType: hard
"@babel/highlight@npm:^7.12.13, @babel/highlight@npm:^7.16.0":
version: 7.16.0
resolution: "@babel/highlight@npm:7.16.0"
@@ -1692,6 +1751,15 @@ __metadata:
languageName: node
linkType: hard
"@babel/parser@npm:^7.23.6":
version: 7.23.6
resolution: "@babel/parser@npm:7.23.6"
bin:
parser: ./bin/babel-parser.js
checksum: 140801c43731a6c41fd193f5c02bc71fd647a0360ca616b23d2db8be4b9739b9f951a03fc7c2db4f9b9214f4b27c1074db0f18bc3fa653783082d5af7c8860d5
languageName: node
linkType: hard
"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@npm:^7.16.7":
version: 7.16.7
resolution: "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@npm:7.16.7"
@@ -5088,6 +5156,24 @@ __metadata:
languageName: node
linkType: hard
"@babel/traverse@npm:^7.23.7":
version: 7.23.7
resolution: "@babel/traverse@npm:7.23.7"
dependencies:
"@babel/code-frame": ^7.23.5
"@babel/generator": ^7.23.6
"@babel/helper-environment-visitor": ^7.22.20
"@babel/helper-function-name": ^7.23.0
"@babel/helper-hoist-variables": ^7.22.5
"@babel/helper-split-export-declaration": ^7.22.6
"@babel/parser": ^7.23.6
"@babel/types": ^7.23.6
debug: ^4.3.1
globals: ^11.1.0
checksum: d4a7afb922361f710efc97b1e25ec343fab8b2a4ddc81ca84f9a153f22d4482112cba8f263774be8d297918b6c4767c7a98988ab4e53ac73686c986711dd002e
languageName: node
linkType: hard
"@babel/types@npm:^7.1.6, @babel/types@npm:^7.7.2":
version: 7.14.0
resolution: "@babel/types@npm:7.14.0"
@@ -5172,6 +5258,17 @@ __metadata:
languageName: node
linkType: hard
"@babel/types@npm:^7.23.6":
version: 7.23.6
resolution: "@babel/types@npm:7.23.6"
dependencies:
"@babel/helper-string-parser": ^7.23.4
"@babel/helper-validator-identifier": ^7.22.20
to-fast-properties: ^2.0.0
checksum: 68187dbec0d637f79bc96263ac95ec8b06d424396678e7e225492be866414ce28ebc918a75354d4c28659be6efe30020b4f0f6df81cc418a2d30645b690a8de0
languageName: node
linkType: hard
"@babel/types@npm:^7.8.3":
version: 7.21.4
resolution: "@babel/types@npm:7.21.4"
@@ -5195,6 +5292,13 @@ __metadata:
languageName: node
linkType: hard
"@colors/colors@npm:1.5.0":
version: 1.5.0
resolution: "@colors/colors@npm:1.5.0"
checksum: d64d5260bed1d5012ae3fc617d38d1afc0329fec05342f4e6b838f46998855ba56e0a73833f4a80fa8378c84810da254f76a8a19c39d038260dc06dc4e007425
languageName: node
linkType: hard
"@csstools/css-parser-algorithms@npm:^2.3.0":
version: 2.3.0
resolution: "@csstools/css-parser-algorithms@npm:2.3.0"
@@ -6207,6 +6311,20 @@ __metadata:
languageName: node
linkType: hard
"@isaacs/cliui@npm:^8.0.2":
version: 8.0.2
resolution: "@isaacs/cliui@npm:8.0.2"
dependencies:
string-width: ^5.1.2
string-width-cjs: "npm:string-width@^4.2.0"
strip-ansi: ^7.0.1
strip-ansi-cjs: "npm:strip-ansi@^6.0.1"
wrap-ansi: ^8.1.0
wrap-ansi-cjs: "npm:wrap-ansi@^7.0.0"
checksum: 4a473b9b32a7d4d3cfb7a614226e555091ff0c5a29a1734c28c72a182c2f6699b26fc6b5c2131dfd841e86b185aea714c72201d7c98c2fba5f17709333a67aeb
languageName: node
linkType: hard
"@jridgewell/gen-mapping@npm:^0.3.0":
version: 0.3.1
resolution: "@jridgewell/gen-mapping@npm:0.3.1"
@@ -6483,6 +6601,13 @@ __metadata:
languageName: node
linkType: hard
"@pkgjs/parseargs@npm:^0.11.0":
version: 0.11.0
resolution: "@pkgjs/parseargs@npm:0.11.0"
checksum: 6ad6a00fc4f2f2cfc6bff76fb1d88b8ee20bc0601e18ebb01b6d4be583733a860239a521a7fbca73b612e66705078809483549d2b18f370eb346c5155c8e4a0f
languageName: node
linkType: hard
"@popperjs/core@npm:^2.9.0":
version: 2.11.8
resolution: "@popperjs/core@npm:2.11.8"
@@ -7844,6 +7969,15 @@ __metadata:
languageName: node
linkType: hard
"abort-controller@npm:^3.0.0":
version: 3.0.0
resolution: "abort-controller@npm:3.0.0"
dependencies:
event-target-shim: ^5.0.0
checksum: 170bdba9b47b7e65906a28c8ce4f38a7a369d78e2271706f020849c1bfe0ee2067d4261df8bbb66eb84f79208fd5b710df759d64191db58cfba7ce8ef9c54b75
languageName: node
linkType: hard
"abortcontroller-polyfill@npm:^1.7.3":
version: 1.7.3
resolution: "abortcontroller-polyfill@npm:1.7.3"
@@ -8216,6 +8350,13 @@ __metadata:
languageName: node
linkType: hard
"ansi-regex@npm:^6.0.1":
version: 6.0.1
resolution: "ansi-regex@npm:6.0.1"
checksum: 1ff8b7667cded1de4fa2c9ae283e979fc87036864317da86a2e546725f96406746411d0d85e87a2d12fa5abd715d90006de7fa4fa0477c92321ad3b4c7d4e169
languageName: node
linkType: hard
"ansi-styles@npm:^2.2.1":
version: 2.2.1
resolution: "ansi-styles@npm:2.2.1"
@@ -8241,6 +8382,13 @@ __metadata:
languageName: node
linkType: hard
"ansi-styles@npm:^6.1.0":
version: 6.2.1
resolution: "ansi-styles@npm:6.2.1"
checksum: ef940f2f0ced1a6347398da88a91da7930c33ecac3c77b72c5905f8b8fe402c52e6fde304ff5347f616e27a742da3f1dc76de98f6866c69251ad0b07a66776d9
languageName: node
linkType: hard
"ansi-styles@npm:~1.0.0":
version: 1.0.0
resolution: "ansi-styles@npm:1.0.0"
@@ -8320,6 +8468,16 @@ __metadata:
languageName: node
linkType: hard
"are-we-there-yet@npm:^4.0.0":
version: 4.0.1
resolution: "are-we-there-yet@npm:4.0.1"
dependencies:
delegates: ^1.0.0
readable-stream: ^4.1.0
checksum: 16871ee259e138bfab60800ae5b53406fb1b72b5d356f98b13c1b222bb2a13d9bc4292d79f4521fb0eca10874eb3838ae0d9f721f3bb34ddd37ee8f949831800
languageName: node
linkType: hard
"argparse@npm:^1.0.7":
version: 1.0.10
resolution: "argparse@npm:1.0.10"
@@ -11082,7 +11240,7 @@ __metadata:
languageName: node
linkType: hard
"browserslist@npm:^4.22.1":
"browserslist@npm:^4.22.1, browserslist@npm:^4.22.2":
version: 4.22.2
resolution: "browserslist@npm:4.22.2"
dependencies:
@@ -11140,6 +11298,16 @@ __metadata:
languageName: node
linkType: hard
"buffer@npm:^6.0.3":
version: 6.0.3
resolution: "buffer@npm:6.0.3"
dependencies:
base64-js: ^1.3.1
ieee754: ^1.2.1
checksum: 5ad23293d9a731e4318e420025800b42bf0d264004c0286c8cc010af7a270c7a0f6522e84f54b9ad65cbd6db20b8badbfd8d2ebf4f80fa03dab093b89e68c3f9
languageName: node
linkType: hard
"builtin-status-codes@npm:^3.0.0":
version: 3.0.0
resolution: "builtin-status-codes@npm:3.0.0"
@@ -11432,7 +11600,7 @@ __metadata:
languageName: node
linkType: hard
"chalk@npm:^5.2.0":
"chalk@npm:^5.2.0, chalk@npm:^5.3.0":
version: 5.3.0
resolution: "chalk@npm:5.3.0"
checksum: 623922e077b7d1e9dedaea6f8b9e9352921f8ae3afe739132e0e00c275971bdd331268183b2628cf4ab1727c45ea1f28d7e24ac23ce1db1eb653c414ca8a5a80
@@ -11703,6 +11871,19 @@ __metadata:
languageName: node
linkType: hard
"cli-table3@npm:^0.6.0":
version: 0.6.3
resolution: "cli-table3@npm:0.6.3"
dependencies:
"@colors/colors": 1.5.0
string-width: ^4.2.0
dependenciesMeta:
"@colors/colors":
optional: true
checksum: 09897f68467973f827c04e7eaadf13b55f8aec49ecd6647cc276386ea660059322e2dd8020a8b6b84d422dbdd619597046fa89cbbbdc95b2cea149a2df7c096c
languageName: node
linkType: hard
"cli-table@npm:^0.3.1":
version: 0.3.6
resolution: "cli-table@npm:0.3.6"
@@ -12120,6 +12301,13 @@ __metadata:
languageName: node
linkType: hard
"convert-source-map@npm:^2.0.0":
version: 2.0.0
resolution: "convert-source-map@npm:2.0.0"
checksum: 63ae9933be5a2b8d4509daca5124e20c14d023c820258e484e32dc324d34c2754e71297c94a05784064ad27615037ef677e3f0c00469fb55f409d2bb21261035
languageName: node
linkType: hard
"cookie-signature@npm:1.0.6":
version: 1.0.6
resolution: "cookie-signature@npm:1.0.6"
@@ -13496,6 +13684,13 @@ __metadata:
languageName: node
linkType: hard
"eastasianwidth@npm:^0.2.0":
version: 0.2.0
resolution: "eastasianwidth@npm:0.2.0"
checksum: 7d00d7cd8e49b9afa762a813faac332dee781932d6f2c848dc348939c4253f1d4564341b7af1d041853bc3f32c2ef141b58e0a4d9862c17a7f08f68df1e0f1ed
languageName: node
linkType: hard
"ecc-jsbn@npm:~0.1.1":
version: 0.1.2
resolution: "ecc-jsbn@npm:0.1.2"
@@ -13862,6 +14057,48 @@ __metadata:
languageName: node
linkType: hard
"ember-auto-import@npm:^2.7.0":
version: 2.7.2
resolution: "ember-auto-import@npm:2.7.2"
dependencies:
"@babel/core": ^7.16.7
"@babel/plugin-proposal-class-properties": ^7.16.7
"@babel/plugin-proposal-decorators": ^7.16.7
"@babel/plugin-proposal-private-methods": ^7.16.7
"@babel/plugin-transform-class-static-block": ^7.16.7
"@babel/preset-env": ^7.16.7
"@embroider/macros": ^1.0.0
"@embroider/shared-internals": ^2.0.0
babel-loader: ^8.0.6
babel-plugin-ember-modules-api-polyfill: ^3.5.0
babel-plugin-ember-template-compilation: ^2.0.1
babel-plugin-htmlbars-inline-precompile: ^5.2.1
babel-plugin-syntax-dynamic-import: ^6.18.0
broccoli-debug: ^0.6.4
broccoli-funnel: ^3.0.8
broccoli-merge-trees: ^4.2.0
broccoli-plugin: ^4.0.0
broccoli-source: ^3.0.0
css-loader: ^5.2.0
debug: ^4.3.1
fs-extra: ^10.0.0
fs-tree-diff: ^2.0.0
handlebars: ^4.3.1
js-string-escape: ^1.0.1
lodash: ^4.17.19
mini-css-extract-plugin: ^2.5.2
minimatch: ^3.0.0
parse5: ^6.0.1
resolve: ^1.20.0
resolve-package-path: ^4.0.3
semver: ^7.3.4
style-loader: ^2.0.0
typescript-memoize: ^1.0.0-alpha.3
walk-sync: ^3.0.0
checksum: c998028f7330ecb9ffa5242b60b55f7e6722c3b45959b1b0910cdb9b82920abdefd36f393a80e79bdfd0ff6532641ff1a3fd5b668bfdecd883ecafb00620c15f
languageName: node
linkType: hard
"ember-basic-dropdown@npm:6.0.1":
version: 6.0.1
resolution: "ember-basic-dropdown@npm:6.0.1"
@@ -14923,6 +15160,31 @@ __metadata:
languageName: node
linkType: hard
"ember-exam@npm:^9.0.0":
version: 9.0.0
resolution: "ember-exam@npm:9.0.0"
dependencies:
"@babel/core": ^7.23.6
chalk: ^5.3.0
cli-table3: ^0.6.0
debug: ^4.2.0
ember-auto-import: ^2.7.0
ember-cli-babel: ^8.2.0
execa: ^8.0.1
fs-extra: ^11.2.0
js-yaml: ^4.0.0
npmlog: ^7.0.0
rimraf: ^5.0.0
semver: ^7.3.2
silent-error: ^1.1.1
peerDependencies:
ember-qunit: "*"
ember-source: ">= 4.0.0"
qunit: "*"
checksum: 5ff9dd11d89d96d6ca018e145eefad558f38b93ee95d39108bb919514c66c731713b00c68725e2377da738c1cf4854ce89b6132d2df44be93a4d99900f589a7b
languageName: node
linkType: hard
"ember-fetch@npm:^8.1.2":
version: 8.1.2
resolution: "ember-fetch@npm:8.1.2"
@@ -15538,6 +15800,13 @@ __metadata:
languageName: node
linkType: hard
"emoji-regex@npm:^9.2.2":
version: 9.2.2
resolution: "emoji-regex@npm:9.2.2"
checksum: 8487182da74aabd810ac6d6f1994111dfc0e331b01271ae01ec1eb0ad7b5ecc2bbbbd2f053c05cb55a1ac30449527d819bbfbf0e3de1023db308cbcb47f86601
languageName: node
linkType: hard
"emoji-regex@npm:~6.1.0":
version: 6.1.3
resolution: "emoji-regex@npm:6.1.3"
@@ -16328,6 +16597,13 @@ __metadata:
languageName: node
linkType: hard
"event-target-shim@npm:^5.0.0":
version: 5.0.1
resolution: "event-target-shim@npm:5.0.1"
checksum: 1ffe3bb22a6d51bdeb6bf6f7cf97d2ff4a74b017ad12284cc9e6a279e727dc30a5de6bb613e5596ff4dc3e517841339ad09a7eec44266eccb1aa201a30448166
languageName: node
linkType: hard
"eventemitter3@npm:^4.0.0":
version: 4.0.7
resolution: "eventemitter3@npm:4.0.7"
@@ -16342,7 +16618,7 @@ __metadata:
languageName: node
linkType: hard
"events@npm:^3.0.0, events@npm:^3.2.0":
"events@npm:^3.0.0, events@npm:^3.2.0, events@npm:^3.3.0":
version: 3.3.0
resolution: "events@npm:3.3.0"
checksum: f6f487ad2198aa41d878fa31452f1a3c00958f46e9019286ff4787c84aac329332ab45c9cdc8c445928fc6d7ded294b9e005a7fce9426488518017831b272780
@@ -16451,6 +16727,23 @@ __metadata:
languageName: node
linkType: hard
"execa@npm:^8.0.1":
version: 8.0.1
resolution: "execa@npm:8.0.1"
dependencies:
cross-spawn: ^7.0.3
get-stream: ^8.0.1
human-signals: ^5.0.0
is-stream: ^3.0.0
merge-stream: ^2.0.0
npm-run-path: ^5.1.0
onetime: ^6.0.0
signal-exit: ^4.1.0
strip-final-newline: ^3.0.0
checksum: cac1bf86589d1d9b73bdc5dda65c52012d1a9619c44c526891956745f7b366ca2603d29fe3f7460bacc2b48c6eab5d6a4f7afe0534b31473d3708d1265545e1f
languageName: node
linkType: hard
"exists-sync@npm:0.0.4":
version: 0.0.4
resolution: "exists-sync@npm:0.0.4"
@@ -17187,6 +17480,16 @@ __metadata:
languageName: node
linkType: hard
"foreground-child@npm:^3.1.0":
version: 3.1.1
resolution: "foreground-child@npm:3.1.1"
dependencies:
cross-spawn: ^7.0.0
signal-exit: ^4.0.1
checksum: 139d270bc82dc9e6f8bc045fe2aae4001dc2472157044fdfad376d0a3457f77857fa883c1c8b21b491c6caade9a926a4bed3d3d2e8d3c9202b151a4cbbd0bcd5
languageName: node
linkType: hard
"forever-agent@npm:~0.6.1":
version: 0.6.1
resolution: "forever-agent@npm:0.6.1"
@@ -17299,6 +17602,17 @@ __metadata:
languageName: node
linkType: hard
"fs-extra@npm:^11.2.0":
version: 11.2.0
resolution: "fs-extra@npm:11.2.0"
dependencies:
graceful-fs: ^4.2.0
jsonfile: ^6.0.1
universalify: ^2.0.0
checksum: b12e42fa40ba47104202f57b8480dd098aa931c2724565e5e70779ab87605665594e76ee5fb00545f772ab9ace167fe06d2ab009c416dc8c842c5ae6df7aa7e8
languageName: node
linkType: hard
"fs-extra@npm:^3.0.1":
version: 3.0.1
resolution: "fs-extra@npm:3.0.1"
@@ -17545,6 +17859,22 @@ __metadata:
languageName: node
linkType: hard
"gauge@npm:^5.0.0":
version: 5.0.1
resolution: "gauge@npm:5.0.1"
dependencies:
aproba: ^1.0.3 || ^2.0.0
color-support: ^1.1.3
console-control-strings: ^1.1.0
has-unicode: ^2.0.1
signal-exit: ^4.0.1
string-width: ^4.2.3
strip-ansi: ^6.0.1
wide-align: ^1.1.5
checksum: 09b1eb8d8c850df7e4e2822feef27427afc845d4839fa13a08ddad74f882caf668dd1e77ac5e059d3e9a7b0cef59b706d28be40e1dc5fd326da32965e1f206a6
languageName: node
linkType: hard
"gensync@npm:^1.0.0-beta.2":
version: 1.0.0-beta.2
resolution: "gensync@npm:1.0.0-beta.2"
@@ -17635,6 +17965,13 @@ __metadata:
languageName: node
linkType: hard
"get-stream@npm:^8.0.1":
version: 8.0.1
resolution: "get-stream@npm:8.0.1"
checksum: 01e3d3cf29e1393f05f44d2f00445c5f9ec3d1c49e8179b31795484b9c117f4c695e5e07b88b50785d5c8248a788c85d9913a79266fc77e3ef11f78f10f1b974
languageName: node
linkType: hard
"get-symbol-description@npm:^1.0.0":
version: 1.0.0
resolution: "get-symbol-description@npm:1.0.0"
@@ -17710,6 +18047,21 @@ __metadata:
languageName: node
linkType: hard
"glob@npm:^10.3.7":
version: 10.3.10
resolution: "glob@npm:10.3.10"
dependencies:
foreground-child: ^3.1.0
jackspeak: ^2.3.5
minimatch: ^9.0.1
minipass: ^5.0.0 || ^6.0.2 || ^7.0.0
path-scurry: ^1.10.1
bin:
glob: dist/esm/bin.mjs
checksum: 4f2fe2511e157b5a3f525a54092169a5f92405f24d2aed3142f4411df328baca13059f4182f1db1bf933e2c69c0bd89e57ae87edd8950cba8c7ccbe84f721cf3
languageName: node
linkType: hard
"glob@npm:^5.0.10":
version: 5.0.15
resolution: "glob@npm:5.0.15"
@@ -18507,6 +18859,13 @@ __metadata:
languageName: node
linkType: hard
"human-signals@npm:^5.0.0":
version: 5.0.0
resolution: "human-signals@npm:5.0.0"
checksum: 6504560d5ed91444f16bea3bd9dfc66110a339442084e56c3e7fa7bbdf3f406426d6563d662bdce67064b165eac31eeabfc0857ed170aaa612cf14ec9f9a464c
languageName: node
linkType: hard
"humanize-ms@npm:^1.2.1":
version: 1.2.1
resolution: "humanize-ms@npm:1.2.1"
@@ -18543,7 +18902,7 @@ __metadata:
languageName: node
linkType: hard
"ieee754@npm:^1.1.13, ieee754@npm:^1.1.4":
"ieee754@npm:^1.1.13, ieee754@npm:^1.1.4, ieee754@npm:^1.2.1":
version: 1.2.1
resolution: "ieee754@npm:1.2.1"
checksum: 5144c0c9815e54ada181d80a0b810221a253562422e7c6c3a60b1901154184f49326ec239d618c416c1c5945a2e197107aee8d986a3dd836b53dffefd99b5e7e
@@ -19316,6 +19675,13 @@ __metadata:
languageName: node
linkType: hard
"is-stream@npm:^3.0.0":
version: 3.0.0
resolution: "is-stream@npm:3.0.0"
checksum: 172093fe99119ffd07611ab6d1bcccfe8bc4aa80d864b15f43e63e54b7abc71e779acd69afdb854c4e2a67fdc16ae710e370eda40088d1cfc956a50ed82d8f16
languageName: node
linkType: hard
"is-string@npm:^1.0.5, is-string@npm:^1.0.7":
version: 1.0.7
resolution: "is-string@npm:1.0.7"
@@ -19508,6 +19874,19 @@ __metadata:
languageName: node
linkType: hard
"jackspeak@npm:^2.3.5":
version: 2.3.6
resolution: "jackspeak@npm:2.3.6"
dependencies:
"@isaacs/cliui": ^8.0.2
"@pkgjs/parseargs": ^0.11.0
dependenciesMeta:
"@pkgjs/parseargs":
optional: true
checksum: 57d43ad11eadc98cdfe7496612f6bbb5255ea69fe51ea431162db302c2a11011642f50cfad57288bd0aea78384a0612b16e131944ad8ecd09d619041c8531b54
languageName: node
linkType: hard
"jest-worker@npm:^27.4.5":
version: 27.5.1
resolution: "jest-worker@npm:27.5.1"
@@ -19566,7 +19945,7 @@ __metadata:
languageName: node
linkType: hard
"js-yaml@npm:^4.1.0":
"js-yaml@npm:^4.0.0, js-yaml@npm:^4.1.0":
version: 4.1.0
resolution: "js-yaml@npm:4.1.0"
dependencies:
@@ -19737,7 +20116,7 @@ __metadata:
languageName: node
linkType: hard
"json5@npm:^2.1.1, json5@npm:^2.2.2":
"json5@npm:^2.1.1, json5@npm:^2.2.2, json5@npm:^2.2.3":
version: 2.2.3
resolution: "json5@npm:2.2.3"
bin:
@@ -20641,6 +21020,13 @@ __metadata:
languageName: node
linkType: hard
"lru-cache@npm:^9.1.1 || ^10.0.0":
version: 10.1.0
resolution: "lru-cache@npm:10.1.0"
checksum: 58056d33e2500fbedce92f8c542e7c11b50d7d086578f14b7074d8c241422004af0718e08a6eaae8705cee09c77e39a61c1c79e9370ba689b7010c152e6a76ab
languageName: node
linkType: hard
"magic-string@npm:^0.24.0":
version: 0.24.1
resolution: "magic-string@npm:0.24.1"
@@ -21327,6 +21713,13 @@ __metadata:
languageName: node
linkType: hard
"mimic-fn@npm:^4.0.0":
version: 4.0.0
resolution: "mimic-fn@npm:4.0.0"
checksum: 995dcece15ee29aa16e188de6633d43a3db4611bcf93620e7e62109ec41c79c0f34277165b8ce5e361205049766e371851264c21ac64ca35499acb5421c2ba56
languageName: node
linkType: hard
"min-indent@npm:^1.0.1":
version: 1.0.1
resolution: "min-indent@npm:1.0.1"
@@ -21470,6 +21863,13 @@ __metadata:
languageName: node
linkType: hard
"minipass@npm:^5.0.0 || ^6.0.2 || ^7.0.0":
version: 7.0.4
resolution: "minipass@npm:7.0.4"
checksum: 87585e258b9488caf2e7acea242fd7856bbe9a2c84a7807643513a338d66f368c7d518200ad7b70a508664d408aa000517647b2930c259a8b1f9f0984f344a21
languageName: node
linkType: hard
"minizlib@npm:^2.1.1, minizlib@npm:^2.1.2":
version: 2.1.2
resolution: "minizlib@npm:2.1.2"
@@ -22089,6 +22489,15 @@ __metadata:
languageName: node
linkType: hard
"npm-run-path@npm:^5.1.0":
version: 5.2.0
resolution: "npm-run-path@npm:5.2.0"
dependencies:
path-key: ^4.0.0
checksum: c5325e016014e715689c4014f7e0be16cc4cbf529f32a1723e511bc4689b5f823b704d2bca61ac152ce2bda65e0205dc8b3ba0ec0f5e4c3e162d302f6f5b9efb
languageName: node
linkType: hard
"npmlog@npm:^6.0.0":
version: 6.0.2
resolution: "npmlog@npm:6.0.2"
@@ -22101,6 +22510,18 @@ __metadata:
languageName: node
linkType: hard
"npmlog@npm:^7.0.0":
version: 7.0.1
resolution: "npmlog@npm:7.0.1"
dependencies:
are-we-there-yet: ^4.0.0
console-control-strings: ^1.1.0
gauge: ^5.0.0
set-blocking: ^2.0.0
checksum: caabeb1f557c1094ad7ed3275b968b83ccbaefc133f17366ebb9fe8eb44e1aace28c31419d6244bfc0422aede1202875d555fe6661978bf04386f6cf617f43a4
languageName: node
linkType: hard
"nth-check@npm:^1.0.2":
version: 1.0.2
resolution: "nth-check@npm:1.0.2"
@@ -22320,6 +22741,15 @@ __metadata:
languageName: node
linkType: hard
"onetime@npm:^6.0.0":
version: 6.0.0
resolution: "onetime@npm:6.0.0"
dependencies:
mimic-fn: ^4.0.0
checksum: 0846ce78e440841335d4e9182ef69d5762e9f38aa7499b19f42ea1c4cd40f0b4446094c455c713f9adac3f4ae86f613bb5e30c99e52652764d06a89f709b3788
languageName: node
linkType: hard
"optionator@npm:^0.8.1":
version: 0.8.3
resolution: "optionator@npm:0.8.3"
@@ -22767,6 +23197,13 @@ __metadata:
languageName: node
linkType: hard
"path-key@npm:^4.0.0":
version: 4.0.0
resolution: "path-key@npm:4.0.0"
checksum: 8e6c314ae6d16b83e93032c61020129f6f4484590a777eed709c4a01b50e498822b00f76ceaf94bc64dbd90b327df56ceadce27da3d83393790f1219e07721d7
languageName: node
linkType: hard
"path-parse@npm:^1.0.6, path-parse@npm:^1.0.7":
version: 1.0.7
resolution: "path-parse@npm:1.0.7"
@@ -22797,6 +23234,16 @@ __metadata:
languageName: node
linkType: hard
"path-scurry@npm:^1.10.1":
version: 1.10.1
resolution: "path-scurry@npm:1.10.1"
dependencies:
lru-cache: ^9.1.1 || ^10.0.0
minipass: ^5.0.0 || ^6.0.2 || ^7.0.0
checksum: e2557cff3a8fb8bc07afdd6ab163a92587884f9969b05bbbaf6fe7379348bfb09af9ed292af12ed32398b15fb443e81692047b786d1eeb6d898a51eb17ed7d90
languageName: node
linkType: hard
"path-to-regexp@npm:0.1.7":
version: 0.1.7
resolution: "path-to-regexp@npm:0.1.7"
@@ -23662,6 +24109,19 @@ __metadata:
languageName: node
linkType: hard
"readable-stream@npm:^4.1.0":
version: 4.5.2
resolution: "readable-stream@npm:4.5.2"
dependencies:
abort-controller: ^3.0.0
buffer: ^6.0.3
events: ^3.3.0
process: ^0.11.10
string_decoder: ^1.3.0
checksum: c4030ccff010b83e4f33289c535f7830190773e274b3fcb6e2541475070bdfd69c98001c3b0cb78763fc00c8b62f514d96c2b10a8bd35d5ce45203a25fa1d33a
languageName: node
linkType: hard
"readable-stream@npm:~1.0.2":
version: 1.0.34
resolution: "readable-stream@npm:1.0.34"
@@ -24415,6 +24875,17 @@ __metadata:
languageName: node
linkType: hard
"rimraf@npm:^5.0.0":
version: 5.0.5
resolution: "rimraf@npm:5.0.5"
dependencies:
glob: ^10.3.7
bin:
rimraf: dist/esm/bin.mjs
checksum: d66eef829b2e23b16445f34e73d75c7b7cf4cbc8834b04720def1c8f298eb0753c3d76df77325fad79d0a2c60470525d95f89c2475283ad985fd7441c32732d1
languageName: node
linkType: hard
"rimraf@npm:~2.6.2":
version: 2.6.3
resolution: "rimraf@npm:2.6.3"
@@ -25096,6 +25567,13 @@ __metadata:
languageName: node
linkType: hard
"signal-exit@npm:^4.1.0":
version: 4.1.0
resolution: "signal-exit@npm:4.1.0"
checksum: 64c757b498cb8629ffa5f75485340594d2f8189e9b08700e69199069c8e3070fb3e255f7ab873c05dc0b3cec412aea7402e10a5990cb6a050bd33ba062a6c549
languageName: node
linkType: hard
"silent-error@npm:^1.0.0, silent-error@npm:^1.0.1, silent-error@npm:^1.1.0, silent-error@npm:^1.1.1":
version: 1.1.1
resolution: "silent-error@npm:1.1.1"
@@ -25646,6 +26124,17 @@ __metadata:
languageName: node
linkType: hard
"string-width-cjs@npm:string-width@^4.2.0, string-width@npm:^4.1.0, string-width@npm:^4.2.0":
version: 4.2.2
resolution: "string-width@npm:4.2.2"
dependencies:
emoji-regex: ^8.0.0
is-fullwidth-code-point: ^3.0.0
strip-ansi: ^6.0.0
checksum: 343e089b0e66e0f72aab4ad1d9b6f2c9cc5255844b0c83fd9b53f2a3b3fd0421bdd6cb05be96a73117eb012db0887a6c1d64ca95aaa50c518e48980483fea0ab
languageName: node
linkType: hard
"string-width@npm:^1.0.2 || 2 || 3 || 4, string-width@npm:^4.2.3":
version: 4.2.3
resolution: "string-width@npm:4.2.3"
@@ -25678,14 +26167,14 @@ __metadata:
languageName: node
linkType: hard
"string-width@npm:^4.1.0, string-width@npm:^4.2.0":
version: 4.2.2
resolution: "string-width@npm:4.2.2"
"string-width@npm:^5.0.1, string-width@npm:^5.1.2":
version: 5.1.2
resolution: "string-width@npm:5.1.2"
dependencies:
emoji-regex: ^8.0.0
is-fullwidth-code-point: ^3.0.0
strip-ansi: ^6.0.0
checksum: 343e089b0e66e0f72aab4ad1d9b6f2c9cc5255844b0c83fd9b53f2a3b3fd0421bdd6cb05be96a73117eb012db0887a6c1d64ca95aaa50c518e48980483fea0ab
eastasianwidth: ^0.2.0
emoji-regex: ^9.2.2
strip-ansi: ^7.0.1
checksum: 7369deaa29f21dda9a438686154b62c2c5f661f8dda60449088f9f980196f7908fc39fdd1803e3e01541970287cf5deae336798337e9319a7055af89dafa7193
languageName: node
linkType: hard
@@ -25806,7 +26295,7 @@ __metadata:
languageName: node
linkType: hard
"string_decoder@npm:^1.0.0, string_decoder@npm:^1.1.1":
"string_decoder@npm:^1.0.0, string_decoder@npm:^1.1.1, string_decoder@npm:^1.3.0":
version: 1.3.0
resolution: "string_decoder@npm:1.3.0"
dependencies:
@@ -25835,6 +26324,15 @@ __metadata:
languageName: node
linkType: hard
"strip-ansi-cjs@npm:strip-ansi@^6.0.1, strip-ansi@npm:^6.0.1":
version: 6.0.1
resolution: "strip-ansi@npm:6.0.1"
dependencies:
ansi-regex: ^5.0.1
checksum: f3cd25890aef3ba6e1a74e20896c21a46f482e93df4a06567cebf2b57edabb15133f1f94e57434e0a958d61186087b1008e89c94875d019910a213181a14fc8c
languageName: node
linkType: hard
"strip-ansi@npm:^3.0.0":
version: 3.0.1
resolution: "strip-ansi@npm:3.0.1"
@@ -25871,12 +26369,12 @@ __metadata:
languageName: node
linkType: hard
"strip-ansi@npm:^6.0.1":
version: 6.0.1
resolution: "strip-ansi@npm:6.0.1"
"strip-ansi@npm:^7.0.1":
version: 7.1.0
resolution: "strip-ansi@npm:7.1.0"
dependencies:
ansi-regex: ^5.0.1
checksum: f3cd25890aef3ba6e1a74e20896c21a46f482e93df4a06567cebf2b57edabb15133f1f94e57434e0a958d61186087b1008e89c94875d019910a213181a14fc8c
ansi-regex: ^6.0.1
checksum: 859c73fcf27869c22a4e4d8c6acfe690064659e84bef9458aa6d13719d09ca88dcfd40cbf31fd0be63518ea1a643fe070b4827d353e09533a5b0b9fd4553d64d
languageName: node
linkType: hard
@@ -25917,6 +26415,13 @@ __metadata:
languageName: node
linkType: hard
"strip-final-newline@npm:^3.0.0":
version: 3.0.0
resolution: "strip-final-newline@npm:3.0.0"
checksum: 23ee263adfa2070cd0f23d1ac14e2ed2f000c9b44229aec9c799f1367ec001478469560abefd00c5c99ee6f0b31c137d53ec6029c53e9f32a93804e18c201050
languageName: node
linkType: hard
"strip-indent@npm:^4.0.0":
version: 4.0.0
resolution: "strip-indent@npm:4.0.0"
@@ -27425,6 +27930,7 @@ __metadata:
ember-d3: ^0.5.1
ember-data: ~4.11.3
ember-engines: 0.8.23
ember-exam: ^9.0.0
ember-fetch: ^8.1.2
ember-inflector: 4.0.2
ember-load-initializers: ^2.1.2
@@ -28025,6 +28531,17 @@ __metadata:
languageName: node
linkType: hard
"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0, wrap-ansi@npm:^7.0.0":
version: 7.0.0
resolution: "wrap-ansi@npm:7.0.0"
dependencies:
ansi-styles: ^4.0.0
string-width: ^4.1.0
strip-ansi: ^6.0.0
checksum: a790b846fd4505de962ba728a21aaeda189b8ee1c7568ca5e817d85930e06ef8d1689d49dbf0e881e8ef84436af3a88bc49115c2e2788d841ff1b8b5b51a608b
languageName: node
linkType: hard
"wrap-ansi@npm:^5.1.0":
version: 5.1.0
resolution: "wrap-ansi@npm:5.1.0"
@@ -28047,14 +28564,14 @@ __metadata:
languageName: node
linkType: hard
"wrap-ansi@npm:^7.0.0":
version: 7.0.0
resolution: "wrap-ansi@npm:7.0.0"
"wrap-ansi@npm:^8.1.0":
version: 8.1.0
resolution: "wrap-ansi@npm:8.1.0"
dependencies:
ansi-styles: ^4.0.0
string-width: ^4.1.0
strip-ansi: ^6.0.0
checksum: a790b846fd4505de962ba728a21aaeda189b8ee1c7568ca5e817d85930e06ef8d1689d49dbf0e881e8ef84436af3a88bc49115c2e2788d841ff1b8b5b51a608b
ansi-styles: ^6.1.0
string-width: ^5.0.1
strip-ansi: ^7.0.1
checksum: 371733296dc2d616900ce15a0049dca0ef67597d6394c57347ba334393599e800bab03c41d4d45221b6bc967b8c453ec3ae4749eff3894202d16800fdfe0e238
languageName: node
linkType: hard

View File

@@ -19,17 +19,17 @@ import (
)
type NamespaceRecord struct {
NamespaceID string `json:"namespace_id"`
Entities uint64 `json:"entities"`
NonEntityTokens uint64 `json:"non_entity_tokens"`
SecretSyncAssociations uint64 `json:"secret_sync_associations"`
Mounts []*MountRecord `json:"mounts"`
NamespaceID string `json:"namespace_id"`
Entities uint64 `json:"entities"`
NonEntityTokens uint64 `json:"non_entity_tokens"`
SecretSyncs uint64 `json:"secret_syncs"`
Mounts []*MountRecord `json:"mounts"`
}
type CountsRecord struct {
EntityClients int `json:"entity_clients"`
NonEntityClients int `json:"non_entity_clients"`
SecretSyncAssociations int `json:"secret_sync_associations"`
EntityClients int `json:"entity_clients"`
NonEntityClients int `json:"non_entity_clients"`
SecretSyncs int `json:"secret_syncs"`
}
type NewClientRecord struct {

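The hunk above renames the secret sync counters that end up in serialized activity log records: the JSON keys move from secret_sync_associations to secret_syncs. As a quick illustration of what consumers of these records will now see, here is a minimal sketch that marshals a local copy of the counts shape; the countsRecord struct below is only an illustrative mirror of the new field tags, not the real activity package type.

package main

import (
	"encoding/json"
	"fmt"
)

// countsRecord mirrors the renamed JSON tags for illustration only.
type countsRecord struct {
	EntityClients    int `json:"entity_clients"`
	NonEntityClients int `json:"non_entity_clients"`
	SecretSyncs      int `json:"secret_syncs"`
}

func main() {
	out, _ := json.Marshal(countsRecord{EntityClients: 1, NonEntityClients: 2, SecretSyncs: 3})
	// Prints: {"entity_clients":1,"non_entity_clients":2,"secret_syncs":3}
	fmt.Println(string(out))
}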
View File

@@ -82,9 +82,9 @@ const (
// Known types of activity events; there's presently two internal event
// types (tokens/clients with and without entities), but we're beginning
// to support additional buckets for e.g., ACME requests.
nonEntityTokenActivityType = "non-entity-token"
entityActivityType = "entity"
secretSyncAssociationActivityType = "secret-sync-association"
nonEntityTokenActivityType = "non-entity-token"
entityActivityType = "entity"
secretSyncActivityType = "secret-sync"
)
type segmentInfo struct {
@@ -2033,9 +2033,9 @@ func (p *processCounts) contains(client *activity.EntityRecord) bool {
func (p *processCounts) toCountsRecord() *activity.CountsRecord {
return &activity.CountsRecord{
EntityClients: p.countByType(entityActivityType),
NonEntityClients: p.countByType(nonEntityTokenActivityType),
SecretSyncAssociations: p.countByType(secretSyncAssociationActivityType),
EntityClients: p.countByType(entityActivityType),
NonEntityClients: p.countByType(nonEntityTokenActivityType),
SecretSyncs: p.countByType(secretSyncActivityType),
}
}

View File

@@ -4295,7 +4295,7 @@ func TestActivityLog_processNewClients_delete(t *testing.T) {
byNS := newClients.Namespaces
counts := newClients.Counts
for _, typ := range []string{nonEntityTokenActivityType, secretSyncAssociationActivityType, entityActivityType, ACMEActivityType} {
for _, typ := range []string{nonEntityTokenActivityType, secretSyncActivityType, entityActivityType, ACMEActivityType} {
require.NotContains(t, counts.clientsByType(typ), clientID)
require.NotContains(t, byNS[namespace].Mounts[mount].Counts.clientsByType(typ), clientID)
require.NotContains(t, byNS[namespace].Counts.clientsByType(typ), clientID)
@@ -4308,7 +4308,7 @@ func TestActivityLog_processNewClients_delete(t *testing.T) {
run(t, nonEntityTokenActivityType)
})
t.Run("secret sync", func(t *testing.T) {
run(t, secretSyncAssociationActivityType)
run(t, secretSyncActivityType)
})
t.Run("acme", func(t *testing.T) {
run(t, ACMEActivityType)
@@ -4342,7 +4342,7 @@ func TestActivityLog_processClientRecord(t *testing.T) {
require.Equal(t, byMonth[monthIndex].Namespaces, byNS)
require.Equal(t, byMonth[monthIndex].NewClients.Namespaces, byNS)
for _, typ := range []string{nonEntityTokenActivityType, secretSyncAssociationActivityType, entityActivityType} {
for _, typ := range []string{nonEntityTokenActivityType, secretSyncActivityType, entityActivityType} {
if clientType == typ || (clientType == ACMEActivityType && typ == nonEntityTokenActivityType) {
require.Contains(t, byMonth[monthIndex].Counts.clientsByType(typ), clientID)
require.Contains(t, byMonth[monthIndex].NewClients.Counts.clientsByType(typ), clientID)
@@ -4364,7 +4364,7 @@ func TestActivityLog_processClientRecord(t *testing.T) {
run(t, entityActivityType)
})
t.Run("secret sync", func(t *testing.T) {
run(t, secretSyncAssociationActivityType)
run(t, secretSyncActivityType)
})
t.Run("acme", func(t *testing.T) {
run(t, ACMEActivityType)
@@ -4651,7 +4651,7 @@ func TestActivityLog_writePrecomputedQuery(t *testing.T) {
ClientID: "id-3",
NamespaceID: "ns-3",
MountAccessor: "mnt-3",
ClientType: secretSyncAssociationActivityType,
ClientType: secretSyncActivityType,
}
now := time.Now()
@@ -4690,13 +4690,13 @@ func TestActivityLog_writePrecomputedQuery(t *testing.T) {
require.Equal(t, ns1.Entities, uint64(1))
require.Equal(t, ns1.NonEntityTokens, uint64(0))
require.Equal(t, ns1.SecretSyncAssociations, uint64(0))
require.Equal(t, ns1.SecretSyncs, uint64(0))
require.Equal(t, ns2.Entities, uint64(0))
require.Equal(t, ns2.NonEntityTokens, uint64(1))
require.Equal(t, ns2.SecretSyncAssociations, uint64(0))
require.Equal(t, ns2.SecretSyncs, uint64(0))
require.Equal(t, ns3.Entities, uint64(0))
require.Equal(t, ns3.NonEntityTokens, uint64(0))
require.Equal(t, ns3.SecretSyncAssociations, uint64(1))
require.Equal(t, ns3.SecretSyncs, uint64(1))
require.Len(t, ns1.Mounts, 1)
require.Len(t, ns2.Mounts, 1)
@@ -4711,29 +4711,29 @@ func TestActivityLog_writePrecomputedQuery(t *testing.T) {
// ns1 only has an entity client
require.Equal(t, 1, ns1.Mounts[0].Counts.EntityClients)
require.Equal(t, 0, ns1.Mounts[0].Counts.NonEntityClients)
require.Equal(t, 0, ns1.Mounts[0].Counts.SecretSyncAssociations)
require.Equal(t, 0, ns1.Mounts[0].Counts.SecretSyncs)
// ns2 only has a non entity client
require.Equal(t, 0, ns2.Mounts[0].Counts.EntityClients)
require.Equal(t, 1, ns2.Mounts[0].Counts.NonEntityClients)
require.Equal(t, 0, ns2.Mounts[0].Counts.SecretSyncAssociations)
require.Equal(t, 0, ns2.Mounts[0].Counts.SecretSyncs)
// ns3 only has a secret sync association
require.Equal(t, 0, ns3.Mounts[0].Counts.EntityClients)
require.Equal(t, 0, ns3.Mounts[0].Counts.NonEntityClients)
require.Equal(t, 1, ns3.Mounts[0].Counts.SecretSyncAssociations)
require.Equal(t, 1, ns3.Mounts[0].Counts.SecretSyncs)
monthRecord := val.Months[0]
// there should only be one month present, since the clients were added with the same timestamp
require.Equal(t, monthRecord.Timestamp, timeutil.StartOfMonth(now).UTC().Unix())
require.Equal(t, 1, monthRecord.Counts.NonEntityClients)
require.Equal(t, 1, monthRecord.Counts.EntityClients)
require.Equal(t, 1, monthRecord.Counts.SecretSyncAssociations)
require.Equal(t, 1, monthRecord.Counts.SecretSyncs)
require.Len(t, monthRecord.Namespaces, 3)
require.Len(t, monthRecord.NewClients.Namespaces, 3)
require.Equal(t, 1, monthRecord.NewClients.Counts.EntityClients)
require.Equal(t, 1, monthRecord.NewClients.Counts.NonEntityClients)
require.Equal(t, 1, monthRecord.NewClients.Counts.SecretSyncAssociations)
require.Equal(t, 1, monthRecord.NewClients.Counts.SecretSyncs)
}
type mockTimeNowClock struct {

View File

@@ -110,59 +110,61 @@ func (a *ActivityLog) computeCurrentMonthForBillingPeriodInternal(ctx context.Co
}
hllMonthlyTimestamp = timeutil.StartOfNextMonth(hllMonthlyTimestamp)
}
// Now we will add the clients for the current month to a copy of the billing period's hll to
// see how the cardinality grows.
billingPeriodHLLWithCurrentMonthEntityClients := billingPeriodHLL.Clone()
billingPeriodHLLWithCurrentMonthNonEntityClients := billingPeriodHLL.Clone()
// There's at most one month of data here. We should validate this assumption explicitly
if len(byMonth) > 1 {
return nil, errors.New(fmt.Sprintf("multiple months of data found in partial month's client count breakdowns: %+v\n", byMonth))
}
totalEntities := 0
totalNonEntities := 0
for _, month := range byMonth {
activityTypes := []string{entityActivityType, nonEntityTokenActivityType, secretSyncActivityType}
// Now we will add the clients for the current month to a copy of the billing period's hll to
// see how the cardinality grows.
hllByType := make(map[string]*hyperloglog.Sketch, len(activityTypes))
totalByType := make(map[string]int, len(activityTypes))
for _, typ := range activityTypes {
hllByType[typ] = billingPeriodHLL.Clone()
}
for _, month := range byMonth {
if month.NewClients == nil || month.NewClients.Counts == nil || month.Counts == nil {
return nil, errors.New("malformed current month used to calculate current month's activity")
}
// Note that the following calculations assume that all clients seen are currently in
// the NewClients section of byMonth. It is best to explicitly check this, just verify
// our assumptions about the passed in byMonth argument.
if month.Counts.countByType(entityActivityType) != month.NewClients.Counts.countByType(entityActivityType) ||
month.Counts.countByType(nonEntityTokenActivityType) != month.NewClients.Counts.countByType(nonEntityTokenActivityType) {
return nil, errors.New("current month clients cache assumes billing period")
}
// All the clients for the current month are in the newClients section, initially.
// We need to deduplicate these clients across the billing period by adding them
// into the billing period hyperloglogs.
entities := month.NewClients.Counts.clientsByType(entityActivityType)
nonEntities := month.NewClients.Counts.clientsByType(nonEntityTokenActivityType)
if entities != nil {
for entityID := range entities {
billingPeriodHLLWithCurrentMonthEntityClients.Insert([]byte(entityID))
totalEntities += 1
for _, typ := range activityTypes {
// Note that the following calculations assume that all clients seen are currently in
// the NewClients section of byMonth. It is best to check this explicitly, to verify
// our assumptions about the passed-in byMonth argument.
if month.Counts.countByType(typ) != month.NewClients.Counts.countByType(typ) {
return nil, errors.New("current month clients cache assumes billing period")
}
}
if nonEntities != nil {
for nonEntityID := range nonEntities {
billingPeriodHLLWithCurrentMonthNonEntityClients.Insert([]byte(nonEntityID))
totalNonEntities += 1
for clientID := range month.NewClients.Counts.clientsByType(typ) {
// All the clients for the current month are in the newClients section, initially.
// We need to deduplicate these clients across the billing period by adding them
// into the billing period hyperloglogs.
hllByType[typ].Insert([]byte(clientID))
totalByType[typ] += 1
}
}
}
// The number of new entities for the current month is approximately the size of the hll with
// the current month's entities minus the size of the initial billing period hll.
currentMonthNewEntities := billingPeriodHLLWithCurrentMonthEntityClients.Estimate() - billingPeriodHLL.Estimate()
currentMonthNewNonEntities := billingPeriodHLLWithCurrentMonthNonEntityClients.Estimate() - billingPeriodHLL.Estimate()
currentMonthNewByType := make(map[string]int, len(activityTypes))
for _, typ := range activityTypes {
// The number of new clients of each type for the current month is approximately the size of
// the hll with the current month's clients of that type minus the size of the initial billing period hll.
currentMonthNewByType[typ] = int(hllByType[typ].Estimate() - billingPeriodHLL.Estimate())
}
return &activity.MonthRecord{
Timestamp: timeutil.StartOfMonth(endTime).UTC().Unix(),
NewClients: &activity.NewClientRecord{Counts: &activity.CountsRecord{EntityClients: int(currentMonthNewEntities), NonEntityClients: int(currentMonthNewNonEntities)}},
Counts: &activity.CountsRecord{EntityClients: totalEntities, NonEntityClients: totalNonEntities},
Timestamp: timeutil.StartOfMonth(endTime).UTC().Unix(),
NewClients: &activity.NewClientRecord{Counts: &activity.CountsRecord{
EntityClients: currentMonthNewByType[entityActivityType],
NonEntityClients: currentMonthNewByType[nonEntityTokenActivityType],
SecretSyncs: currentMonthNewByType[secretSyncActivityType],
}},
Counts: &activity.CountsRecord{
EntityClients: totalByType[entityActivityType],
NonEntityClients: totalByType[nonEntityTokenActivityType],
SecretSyncs: totalByType[secretSyncActivityType],
},
}, nil
}
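To make the per-type deduplication above concrete: for every activity type the code clones the billing-period sketch, inserts the current month's client IDs of that type, and treats the growth in estimated cardinality as the number of new clients. The standalone sketch below shows that pattern in isolation; it assumes the axiomhq hyperloglog package (imported as hyperloglog in the surrounding files) and uses made-up client IDs.

package main

import (
	"fmt"

	"github.com/axiomhq/hyperloglog" // assumed to be the hyperloglog package imported above
)

func main() {
	// Clients already counted earlier in the billing period.
	billing := hyperloglog.New()
	for _, id := range []string{"client_1", "client_2", "client_3"} {
		billing.Insert([]byte(id))
	}

	// Current month for one activity type: client_3 repeats, client_4 and client_5 are new.
	currentMonth := []string{"client_3", "client_4", "client_5"}

	// Clone so the original billing-period sketch is untouched, then add this month's clients.
	withCurrent := billing.Clone()
	for _, id := range currentMonth {
		withCurrent.Insert([]byte(id))
	}

	// New clients for the month ~= growth in estimated cardinality.
	newClients := int(withCurrent.Estimate() - billing.Estimate())
	fmt.Println(len(currentMonth), newClients) // 3 2
}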
@@ -182,11 +184,11 @@ func (a *ActivityLog) transformALNamespaceBreakdowns(nsData map[string]*processB
for nsID, ns := range nsData {
nsRecord := activity.NamespaceRecord{
NamespaceID: nsID,
Entities: uint64(ns.Counts.countByType(entityActivityType)),
NonEntityTokens: uint64(ns.Counts.countByType(nonEntityTokenActivityType)),
SecretSyncAssociations: uint64(ns.Counts.countByType(secretSyncAssociationActivityType)),
Mounts: a.transformActivityLogMounts(ns.Mounts),
NamespaceID: nsID,
Entities: uint64(ns.Counts.countByType(entityActivityType)),
NonEntityTokens: uint64(ns.Counts.countByType(nonEntityTokenActivityType)),
SecretSyncs: uint64(ns.Counts.countByType(secretSyncActivityType)),
Mounts: a.transformActivityLogMounts(ns.Mounts),
}
byNamespace = append(byNamespace, &nsRecord)
}

View File

@@ -18,28 +18,31 @@ import (
"google.golang.org/protobuf/proto"
)
// Test_ActivityLog_ComputeCurrentMonthForBillingPeriodInternal creates 3 months of hyperloglogs and fills them with
// overlapping clients. The test calls computeCurrentMonthForBillingPeriodInternal with the current month map having
// some overlap with the previous months. The test then verifies that the results have the correct number of entity and
// non-entity clients. The test also calls computeCurrentMonthForBillingPeriodInternal with an empty current month map,
// Test_ActivityLog_ComputeCurrentMonthForBillingPeriodInternal creates 3 months
// of hyperloglogs and fills them with overlapping clients. The test calls
// computeCurrentMonthForBillingPeriodInternal with the current month map having
// some overlap with the previous months. The test then verifies that the
// results have the correct number of entity, non-entity, and secret sync
// clients. The test also calls
// computeCurrentMonthForBillingPeriodInternal with an empty current month map,
// and verifies that the results are all 0.
func Test_ActivityLog_ComputeCurrentMonthForBillingPeriodInternal(t *testing.T) {
// populate the first month with clients 1-10
// populate the first month with clients 1-20
monthOneHLL := hyperloglog.New()
// populate the second month with clients 5-15
// populate the second month with clients 10-30
monthTwoHLL := hyperloglog.New()
// populate the third month with clients 10-20
// populate the third month with clients 20-40
monthThreeHLL := hyperloglog.New()
for i := 0; i < 20; i++ {
for i := 0; i < 40; i++ {
clientID := []byte(fmt.Sprintf("client_%d", i))
if i < 10 {
if i < 20 {
monthOneHLL.Insert(clientID)
}
if 5 <= i && i < 15 {
if 10 <= i && i < 20 {
monthTwoHLL.Insert(clientID)
}
if 10 <= i && i < 20 {
if 20 <= i && i < 40 {
monthThreeHLL.Insert(clientID)
}
}
@@ -57,51 +60,72 @@ func Test_ActivityLog_ComputeCurrentMonthForBillingPeriodInternal(t *testing.T)
return nil, fmt.Errorf("bad start time")
}
// Below we register the entity, non-entity, and secret sync clients that
// are seen in the current month
// Let's add 2 entities exclusive to month 1 (clients 0,1),
// 2 entities shared by month 1 and 2 (clients 5,6),
// 2 entities shared by month 2 and 3 (clients 10,11), and
// 2 entities exclusive to month 3 (15,16). Furthermore, we can add
// 3 new entities (clients 20,21, and 22).
entitiesStruct := make(map[string]struct{}, 0)
entitiesStruct["client_0"] = struct{}{}
entitiesStruct["client_1"] = struct{}{}
entitiesStruct["client_5"] = struct{}{}
entitiesStruct["client_6"] = struct{}{}
entitiesStruct["client_10"] = struct{}{}
entitiesStruct["client_11"] = struct{}{}
entitiesStruct["client_15"] = struct{}{}
entitiesStruct["client_16"] = struct{}{}
entitiesStruct["client_20"] = struct{}{}
entitiesStruct["client_21"] = struct{}{}
entitiesStruct["client_22"] = struct{}{}
// 2 entities shared by month 1 and 2 (clients 10,11),
// 2 entities shared by month 2 and 3 (clients 20,21), and
// 2 entities exclusive to month 3 (30,31). Furthermore, we can add
// 3 new entities (clients 40,41,42).
entitiesStruct := map[string]struct{}{
"client_0": {},
"client_1": {},
"client_10": {},
"client_11": {},
"client_20": {},
"client_21": {},
"client_30": {},
"client_31": {},
"client_40": {},
"client_41": {},
"client_42": {},
}
// We will add 3 nonentity clients from month 1 (clients 2,3,4),
// 3 shared by months 1 and 2 (7,8,9),
// 3 shared by months 2 and 3 (12,13,14), and
// 3 exclusive to month 3 (17,18,19). We will also
// add 4 new nonentity clients.
nonEntitiesStruct := make(map[string]struct{}, 0)
nonEntitiesStruct["client_2"] = struct{}{}
nonEntitiesStruct["client_3"] = struct{}{}
nonEntitiesStruct["client_4"] = struct{}{}
nonEntitiesStruct["client_7"] = struct{}{}
nonEntitiesStruct["client_8"] = struct{}{}
nonEntitiesStruct["client_9"] = struct{}{}
nonEntitiesStruct["client_12"] = struct{}{}
nonEntitiesStruct["client_13"] = struct{}{}
nonEntitiesStruct["client_14"] = struct{}{}
nonEntitiesStruct["client_17"] = struct{}{}
nonEntitiesStruct["client_18"] = struct{}{}
nonEntitiesStruct["client_19"] = struct{}{}
nonEntitiesStruct["client_23"] = struct{}{}
nonEntitiesStruct["client_24"] = struct{}{}
nonEntitiesStruct["client_25"] = struct{}{}
nonEntitiesStruct["client_26"] = struct{}{}
// 3 shared by months 1 and 2 (12,13,14),
// 3 shared by months 2 and 3 (22,23,24), and
// 3 exclusive to month 3 (32,33,34). We will also
// add 4 new nonentity clients (43,44,45,46)
nonEntitiesStruct := map[string]struct{}{
"client_2": {},
"client_3": {},
"client_4": {},
"client_12": {},
"client_13": {},
"client_14": {},
"client_22": {},
"client_23": {},
"client_24": {},
"client_32": {},
"client_33": {},
"client_34": {},
"client_43": {},
"client_44": {},
"client_45": {},
"client_46": {},
}
// secret syncs have 1 client from month 1 (5)
// 1 shared by months 1 and 2 (15)
// 1 shared by months 2 and 3 (25)
// 2 exclusive to month 3 (35,36)
// and 2 new clients (47,48)
secretSyncStruct := map[string]struct{}{
"client_5": {},
"client_15": {},
"client_25": {},
"client_35": {},
"client_36": {},
"client_47": {},
"client_48": {},
}
counts := &processCounts{
ClientsByType: map[string]clientIDSet{
entityActivityType: entitiesStruct,
nonEntityTokenActivityType: nonEntitiesStruct,
secretSyncActivityType: secretSyncStruct,
},
}
@@ -122,48 +146,29 @@ func Test_ActivityLog_ComputeCurrentMonthForBillingPeriodInternal(t *testing.T)
startTime := timeutil.MonthsPreviousTo(3, endTime)
monthRecord, err := a.computeCurrentMonthForBillingPeriodInternal(context.Background(), currentMonthClientsMap, mockHLLGetFunc, startTime, endTime)
if err != nil {
t.Fatal(err)
}
require.NoError(t, err)
// We should have 11 entity clients, 16 nonentity clients, and 7 secret sync clients,
// plus 3 new entity clients, 4 new nonentity clients, and 2 new secret sync clients
if monthRecord.Counts.EntityClients != 11 {
t.Fatalf("wrong number of entity clients. Expected 11, got %d", monthRecord.Counts.EntityClients)
}
if monthRecord.Counts.NonEntityClients != 16 {
t.Fatalf("wrong number of non entity clients. Expected 16, got %d", monthRecord.Counts.NonEntityClients)
}
if monthRecord.NewClients.Counts.EntityClients != 3 {
t.Fatalf("wrong number of new entity clients. Expected 3, got %d", monthRecord.NewClients.Counts.EntityClients)
}
if monthRecord.NewClients.Counts.NonEntityClients != 4 {
t.Fatalf("wrong number of new non entity clients. Expected 4, got %d", monthRecord.NewClients.Counts.NonEntityClients)
}
require.Equal(t, &activity.CountsRecord{
EntityClients: 11,
NonEntityClients: 16,
SecretSyncs: 7,
}, monthRecord.Counts)
require.Equal(t, &activity.CountsRecord{
EntityClients: 3,
NonEntityClients: 4,
SecretSyncs: 2,
}, monthRecord.NewClients.Counts)
// Attempt to compute current month when no records exist
endTime = time.Now().UTC()
startTime = timeutil.StartOfMonth(endTime)
emptyClientsMap := make(map[int64]*processMonth, 0)
monthRecord, err = a.computeCurrentMonthForBillingPeriodInternal(context.Background(), emptyClientsMap, mockHLLGetFunc, startTime, endTime)
if err != nil {
t.Fatalf("failed to compute empty current month, err: %v", err)
}
require.NoError(t, err)
// We should have 0 entity clients, nonentity clients, and secret sync clients,
// as well as 0 new clients of any type
if monthRecord.Counts.EntityClients != 0 {
t.Fatalf("wrong number of entity clients. Expected 0, got %d", monthRecord.Counts.EntityClients)
}
if monthRecord.Counts.NonEntityClients != 0 {
t.Fatalf("wrong number of non entity clients. Expected 0, got %d", monthRecord.Counts.NonEntityClients)
}
if monthRecord.NewClients.Counts.EntityClients != 0 {
t.Fatalf("wrong number of new entity clients. Expected 0, got %d", monthRecord.NewClients.Counts.EntityClients)
}
if monthRecord.NewClients.Counts.NonEntityClients != 0 {
t.Fatalf("wrong number of new non entity clients. Expected 0, got %d", monthRecord.NewClients.Counts.NonEntityClients)
}
require.Equal(t, &activity.CountsRecord{}, monthRecord.Counts)
require.Equal(t, &activity.CountsRecord{}, monthRecord.NewClients.Counts)
}
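The expected totals in the refactored assertions follow directly from the client sets registered earlier in the test. The tiny program below just restates that arithmetic (it adds nothing beyond the comments in the test, and the figures are the ones asserted above):

package main

import "fmt"

func main() {
	// carried-over clients per overlap bucket + brand-new clients for the current month
	entities := 2 + 2 + 2 + 2 + 3    // = 11 total; the 3 (clients 40-42) are new
	nonEntities := 3 + 3 + 3 + 3 + 4 // = 16 total; the 4 (clients 43-46) are new
	secretSyncs := 1 + 1 + 1 + 2 + 2 // = 7 total; the 2 (clients 47,48) are new
	fmt.Println(entities, nonEntities, secretSyncs) // 11 16 7
}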
// writeEntitySegment writes a single segment file with the given time and index for an entity

View File

@@ -9,6 +9,7 @@ import (
"errors"
"fmt"
"os"
"strconv"
"strings"
"time"
@@ -81,6 +82,17 @@ func (c *Core) enableAudit(ctx context.Context, entry *MountEntry, updateStorage
return fmt.Errorf("backend path must be specified")
}
if fallbackRaw, ok := entry.Options["fallback"]; ok {
fallback, err := parseutil.ParseBool(fallbackRaw)
if err != nil {
return fmt.Errorf("unable to enable audit device '%s', cannot parse supplied 'fallback' setting: %w", entry.Path, err)
}
// Reassigning the fallback value means we can ensure that the formatting
// of it as a string is consistent for future comparisons.
entry.Options["fallback"] = strconv.FormatBool(fallback)
}
// Update the audit table
c.auditLock.Lock()
defer c.auditLock.Unlock()
@@ -88,6 +100,8 @@ func (c *Core) enableAudit(ctx context.Context, entry *MountEntry, updateStorage
// Look for matching name
for _, ent := range c.audit.Entries {
switch {
case entry.Options["fallback"] == "true" && ent.Options["fallback"] == "true":
return fmt.Errorf("unable to enable audit device '%s', a fallback device already exists '%s'", entry.Path, ent.Path)
// Existing is sql/mysql/ new is sql/ or
// existing is sql/ and new is sql/mysql/
case strings.HasPrefix(ent.Path, entry.Path):
@@ -531,7 +545,7 @@ func (c *Core) newAuditBackend(ctx context.Context, entry *MountEntry, view logi
!disableEventLogger,
c.auditedHeaders)
if err != nil {
return nil, err
return nil, fmt.Errorf("unable to create new audit backend: %w", err)
}
if be == nil {
return nil, fmt.Errorf("nil backend returned from %q factory function", entry.Type)
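The reassignment of entry.Options["fallback"] above matters because the duplicate-fallback check compares the stored option string against "true". The sketch below only illustrates why normalising through parseutil.ParseBool plus strconv.FormatBool makes that comparison safe for the various truthy spellings an operator might supply; it is a standalone toy, not the enableAudit code itself, and it assumes parseutil comes from github.com/hashicorp/go-secure-stdlib/parseutil.

package main

import (
	"fmt"
	"strconv"

	"github.com/hashicorp/go-secure-stdlib/parseutil"
)

func main() {
	// Operators may spell the option several ways; all of these are truthy.
	for _, raw := range []string{"true", "True", "1", "t"} {
		fallback, err := parseutil.ParseBool(raw)
		if err != nil {
			fmt.Println("cannot parse 'fallback':", err)
			continue
		}
		// Store the canonical form so a later comparison against "true" works for every spelling.
		fmt.Printf("%q -> %q\n", raw, strconv.FormatBool(fallback))
	}
}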

View File

@@ -12,9 +12,9 @@ import (
"sync"
"time"
"github.com/armon/go-metrics"
"github.com/hashicorp/eventlogger"
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-metrics"
"github.com/hashicorp/go-multierror"
"github.com/hashicorp/vault/audit"
"github.com/hashicorp/vault/helper/namespace"
@@ -32,32 +32,52 @@ type backendEntry struct {
type AuditBroker struct {
sync.RWMutex
backends map[string]backendEntry
logger hclog.Logger
// broker is used to register pipelines for all devices except a fallback device.
broker *eventlogger.Broker
// fallbackBroker is used to register a pipeline to be used as a fallback
// in situations where we cannot use the eventlogger.Broker to guarantee that
// the required number of sinks were successfully written to. This situation
// occurs when all the audit devices registered with the broker use filtering.
// NOTE: there should only ever be a single device registered on the fallbackBroker.
fallbackBroker *eventlogger.Broker
// fallbackName stores the name (path) of the audit device which has been configured
// as the fallback pipeline (its eventlogger.PipelineID).
fallbackName string
logger hclog.Logger
}
// NewAuditBroker creates a new audit broker
func NewAuditBroker(log hclog.Logger, useEventLogger bool) (*AuditBroker, error) {
var eventBroker *eventlogger.Broker
var fallbackBroker *eventlogger.Broker
var err error
// The reason for this check is due to 1.15.x supporting the env var:
// 'VAULT_AUDIT_DISABLE_EVENTLOGGER'
// When NewAuditBroker is called, it is supplied a bool to determine whether
// we initialize the broker, which are left nil otherwise.
// we initialize the broker (and fallback broker), which are left nil otherwise.
// In 1.16.x this check should go away and the env var removed.
if useEventLogger {
eventBroker, err = eventlogger.NewBroker()
if err != nil {
return nil, fmt.Errorf("error creating event broker for audit events: %w", err)
}
// Set up the broker that will support a single fallback device.
fallbackBroker, err = eventlogger.NewBroker()
if err != nil {
return nil, fmt.Errorf("error creating event fallback broker for audit event: %w", err)
}
}
b := &AuditBroker{
backends: make(map[string]backendEntry),
logger: log,
broker: eventBroker,
backends: make(map[string]backendEntry),
logger: log,
broker: eventBroker,
fallbackBroker: fallbackBroker,
}
return b, nil
}
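A hedged usage sketch of how the two kinds of device served by these brokers might be enabled via the Go API client; the filter expression, file paths, and mount paths are illustrative assumptions, not part of this change.

package main

import (
	"log"

	vault "github.com/hashicorp/vault/api"
)

func main() {
	client, err := vault.NewClient(vault.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	// A filtered device only receives matching entries, so on its own it cannot
	// guarantee that every request/response is audited somewhere.
	err = client.Sys().EnableAuditWithOptions("kv-audit", &vault.EnableAuditOptions{
		Type: "file",
		Options: map[string]string{
			"file_path": "/var/log/vault_kv_audit.log",
			"filter":    "mount_type == kv", // hypothetical filter expression
		},
	})
	if err != nil {
		log.Fatal(err)
	}

	// A single fallback device catches everything the filtered pipelines drop;
	// it is registered on the fallbackBroker, and attempting to enable a second
	// fallback device is rejected by the checks in this change.
	err = client.Sys().EnableAuditWithOptions("fallback-audit", &vault.EnableAuditOptions{
		Type: "file",
		Options: map[string]string{
			"file_path": "/var/log/vault_fallback_audit.log",
			"fallback":  "true",
		},
	})
	if err != nil {
		log.Fatal(err)
	}
}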
@@ -74,19 +94,42 @@ func (a *AuditBroker) Register(name string, b audit.Backend, local bool) error {
return fmt.Errorf("%s: name is required: %w", op, event.ErrInvalidParameter)
}
// If the backend is already registered, we cannot re-register it.
if a.isRegistered(name) {
return fmt.Errorf("%s: backend already registered '%s'", op, name)
}
// Fallback devices are singleton instances; we cannot register more than one or overwrite the existing one.
if b.IsFallback() && a.fallbackBroker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())) {
existing, err := a.existingFallbackName()
if err != nil {
return fmt.Errorf("%s: existing fallback device already registered: %w", op, err)
}
return fmt.Errorf("%s: existing fallback device already registered: %q", op, existing)
}
// The reason for this check is due to 1.15.x supporting the env var:
// 'VAULT_AUDIT_DISABLE_EVENTLOGGER'
// When NewAuditBroker is called, it is supplied a bool to determine whether
// we initialize the broker, which are left nil otherwise.
// we initialize the broker (and fallback broker), which are left nil otherwise.
// In 1.16.x this check should go away and the env var removed.
if a.broker != nil {
if name != b.Name() {
return fmt.Errorf("%s: audit registration failed due to device name mismatch: %q, %q", op, name, b.Name())
}
err := a.register(name, b)
if err != nil {
return fmt.Errorf("%s: unable to register device for %q: %w", op, name, err)
switch {
case b.IsFallback():
err := a.registerFallback(name, b)
if err != nil {
return fmt.Errorf("%s: unable to register fallback device for %q: %w", op, name, err)
}
default:
err := a.register(name, b)
if err != nil {
return fmt.Errorf("%s: unable to register device for %q: %w", op, name, err)
}
}
}
@@ -110,6 +153,12 @@ func (a *AuditBroker) Deregister(ctx context.Context, name string) error {
return fmt.Errorf("%s: name is required: %w", op, event.ErrInvalidParameter)
}
// If the backend isn't actually registered, then there's nothing to do.
// We don't return any error so that Deregister can be idempotent.
if !a.isRegistered(name) {
return nil
}
// Remove the Backend from the map first, so that if an error occurs while
// removing the pipeline and nodes, we can quickly exit this method with
// the error.
@@ -118,23 +167,37 @@ func (a *AuditBroker) Deregister(ctx context.Context, name string) error {
// The reason for this check is due to 1.15.x supporting the env var:
// 'VAULT_AUDIT_DISABLE_EVENTLOGGER'
// When NewAuditBroker is called, it is supplied a bool to determine whether
// we initialize the broker, which are left nil otherwise.
// we initialize the broker (and fallback broker), which are left nil otherwise.
// In 1.16.x this check should go away and the env var removed.
if a.broker != nil {
err := a.deregister(ctx, name)
if err != nil {
return fmt.Errorf("%s: deregistration failed for audit device %q: %w", op, name, err)
switch {
case name == a.fallbackName:
err := a.deregisterFallback(ctx, name)
if err != nil {
return fmt.Errorf("%s: deregistration failed for fallback audit device %q: %w", op, name, err)
}
default:
err := a.deregister(ctx, name)
if err != nil {
return fmt.Errorf("%s: deregistration failed for audit device %q: %w", op, name, err)
}
}
}
return nil
}
// IsRegistered is used to check if a given audit backend is registered
// IsRegistered is used to check if a given audit backend is registered.
func (a *AuditBroker) IsRegistered(name string) bool {
a.RLock()
defer a.RUnlock()
return a.isRegistered(name)
}
// isRegistered is used to check if a given audit backend is registered.
// This method should be used within the AuditBroker to prevent locking issues.
func (a *AuditBroker) isRegistered(name string) bool {
_, ok := a.backends[name]
return ok
}
@@ -236,6 +299,9 @@ func (a *AuditBroker) LogRequest(ctx context.Context, in *logical.LogInput, head
e.Data = in
// There may be cases where only the fallback device was added but no other
// normal audit devices, so check if the broker has an audit-based pipeline
// registered before trying to send to it.
var status eventlogger.Status
if a.broker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())) {
status, err = a.broker.Send(ctx, eventlogger.EventType(event.AuditType.String()), e)
@@ -255,6 +321,15 @@ func (a *AuditBroker) LogRequest(ctx context.Context, in *logical.LogInput, head
retErr = multierror.Append(retErr, multierror.Append(errors.New("error during audit pipeline processing"), status.Warnings...))
return retErr.ErrorOrNil()
}
// If a fallback device is registered we can rely on that to 'catch all'
// and also the broker level guarantee for completed sinks.
if a.fallbackBroker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())) {
status, err = a.fallbackBroker.Send(ctx, eventlogger.EventType(event.AuditType.String()), e)
if err != nil {
retErr = multierror.Append(retErr, multierror.Append(fmt.Errorf("auditing request to fallback device failed: %w", err), status.Warnings...))
}
}
}
}
@@ -349,6 +424,9 @@ func (a *AuditBroker) LogResponse(ctx context.Context, in *logical.LogInput, hea
defer auditCancel()
auditContext = namespace.ContextWithNamespace(auditContext, ns)
// There may be cases where only the fallback device was added but no other
// normal audit devices, so check if the broker has an audit-based pipeline
// registered before trying to send to it.
var status eventlogger.Status
if a.broker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())) {
status, err = a.broker.Send(auditContext, eventlogger.EventType(event.AuditType.String()), e)
@@ -368,6 +446,15 @@ func (a *AuditBroker) LogResponse(ctx context.Context, in *logical.LogInput, hea
retErr = multierror.Append(retErr, multierror.Append(errors.New("error during audit pipeline processing"), status.Warnings...))
return retErr.ErrorOrNil()
}
// If a fallback device is registered we can rely on that to 'catch all'
// and also the broker level guarantee for completed sinks.
if a.fallbackBroker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())) {
status, err = a.fallbackBroker.Send(auditContext, eventlogger.EventType(event.AuditType.String()), e)
if err != nil {
retErr = multierror.Append(retErr, multierror.Append(fmt.Errorf("auditing response to fallback device failed: %w", err), status.Warnings...))
}
}
}
}
@@ -391,13 +478,19 @@ func (a *AuditBroker) Invalidate(ctx context.Context, key string) {
// guarantee provided by setting the threshold to 1, and must set it to 0.
// If you are registering an audit device, you should first check if that backend
// does not have filtering before querying the backends via requiredSuccessThresholdSinks.
// backends may also contain a fallback device, which should be ignored as it is
// handled by the fallbackBroker.
func (a *AuditBroker) requiredSuccessThresholdSinks() int {
threshold := 0
// We might need to check over all the existing backends to discover if any
// don't use filtering.
for _, be := range a.backends {
if !be.backend.HasFiltering() {
switch {
case be.backend.IsFallback():
// Ignore fallback devices as they're handled by a separate broker.
continue
case !be.backend.HasFiltering():
threshold = 1
break
}
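To make the threshold rules described above concrete, here is a simplified, self-contained sketch (hypothetical types, not the real backendEntry) of how the main broker's required success threshold is derived: fallback devices are skipped, and a single non-filtering device is enough to require one guaranteed sink.

package main

import "fmt"

type device struct {
	name      string
	filtering bool
	fallback  bool
}

// requiredThreshold mirrors, in simplified form, the logic above: the main broker
// must guarantee at least one sink only when some registered device neither
// filters nor acts as the fallback.
func requiredThreshold(devices []device) int {
	for _, d := range devices {
		if d.fallback {
			continue // handled by the separate fallback broker
		}
		if !d.filtering {
			return 1
		}
	}
	return 0
}

func main() {
	fmt.Println(requiredThreshold([]device{{name: "file"}}))                      // 1
	fmt.Println(requiredThreshold([]device{{name: "filtered", filtering: true}})) // 0
	fmt.Println(requiredThreshold([]device{
		{name: "filtered", filtering: true},
		{name: "fallback", fallback: true},
	})) // 0
}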
@@ -432,6 +525,65 @@ func registerNodesAndPipeline(broker *eventlogger.Broker, b audit.Backend) error
return nil
}
// existingFallbackName returns the name of the fallback device which is registered
// with the AuditBroker.
func (a *AuditBroker) existingFallbackName() (string, error) {
const op = "vault.(AuditBroker).existingFallbackName"
for _, be := range a.backends {
if be.backend.IsFallback() {
return be.backend.Name(), nil
}
}
return "", fmt.Errorf("%s: existing fallback device name is missing", op)
}
// registerFallback can be used to register a fallback device; it will also
// configure the success threshold required for sinks.
func (a *AuditBroker) registerFallback(name string, backend audit.Backend) error {
const op = "vault.(AuditBroker).registerFallback"
err := registerNodesAndPipeline(a.fallbackBroker, backend)
if err != nil {
return fmt.Errorf("%s: fallback device pipeline registration error: %w", op, err)
}
// Store the name of the fallback audit device so that we can check when
// deregistering if the device is the single fallback one.
a.fallbackName = backend.Name()
// We need to turn on the threshold for the fallback broker, so we can
// guarantee it ends up somewhere
err = a.fallbackBroker.SetSuccessThresholdSinks(eventlogger.EventType(event.AuditType.String()), 1)
if err != nil {
return fmt.Errorf("%s: unable to configure fallback sink success threshold (1) for %q: %w", op, name, err)
}
return nil
}
// deregisterFallback can be used to deregister a fallback audit device; it will
// also configure the success threshold required for sinks.
func (a *AuditBroker) deregisterFallback(ctx context.Context, name string) error {
const op = "vault.(AuditBroker).deregisterFallback"
err := a.fallbackBroker.SetSuccessThresholdSinks(eventlogger.EventType(event.AuditType.String()), 0)
if err != nil {
return fmt.Errorf("%s: unable to configure fallback sink success threshold (0) for %q: %w", op, name, err)
}
_, err = a.fallbackBroker.RemovePipelineAndNodes(ctx, eventlogger.EventType(event.AuditType.String()), eventlogger.PipelineID(name))
if err != nil {
return fmt.Errorf("%s: unable to deregister fallback device %q: %w", op, name, err)
}
// Clear the fallback device name now we've deregistered.
a.fallbackName = ""
return nil
}
// register can be used to register a normal audit device; it will also calculate
// and configure the success threshold required for sinks.
func (a *AuditBroker) register(name string, backend audit.Backend) error {

View File

@@ -141,3 +141,118 @@ func TestAuditBroker_Deregister_SuccessThresholdSinks(t *testing.T) {
require.True(t, ok)
require.Equal(t, 1, res)
}
// TestAuditBroker_Register_Fallback ensures we can register a fallback device.
func TestAuditBroker_Register_Fallback(t *testing.T) {
t.Parallel()
l := corehelpers.NewTestLogger(t)
a, err := NewAuditBroker(l, true)
require.NoError(t, err)
require.NotNil(t, a)
path := "juan/"
fallbackBackend := testAuditBackend(t, path, map[string]string{"fallback": "true"})
err = a.Register(path, fallbackBackend, false)
require.NoError(t, err)
require.True(t, a.fallbackBroker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())))
require.Equal(t, path, a.fallbackName)
threshold, found := a.fallbackBroker.SuccessThresholdSinks(eventlogger.EventType(event.AuditType.String()))
require.True(t, found)
require.Equal(t, 1, threshold)
}
// TestAuditBroker_Register_FallbackMultiple tests that trying to register more
// than a single fallback device results in the correct error.
func TestAuditBroker_Register_FallbackMultiple(t *testing.T) {
t.Parallel()
l := corehelpers.NewTestLogger(t)
a, err := NewAuditBroker(l, true)
require.NoError(t, err)
require.NotNil(t, a)
path1 := "juan1/"
fallbackBackend1 := testAuditBackend(t, path1, map[string]string{"fallback": "true"})
err = a.Register(path1, fallbackBackend1, false)
require.NoError(t, err)
require.True(t, a.fallbackBroker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())))
require.Equal(t, path1, a.fallbackName)
path2 := "juan2/"
fallbackBackend2 := testAuditBackend(t, path2, map[string]string{"fallback": "true"})
err = a.Register(path1, fallbackBackend2, false)
require.Error(t, err)
require.EqualError(t, err, "vault.(AuditBroker).Register: backend already registered 'juan1/'")
require.True(t, a.fallbackBroker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())))
require.Equal(t, path1, a.fallbackName)
}
// TestAuditBroker_Deregister_Fallback ensures that we can deregister a fallback
// device successfully.
func TestAuditBroker_Deregister_Fallback(t *testing.T) {
t.Parallel()
l := corehelpers.NewTestLogger(t)
a, err := NewAuditBroker(l, true)
require.NoError(t, err)
require.NotNil(t, a)
path := "juan/"
fallbackBackend := testAuditBackend(t, path, map[string]string{"fallback": "true"})
err = a.Register(path, fallbackBackend, false)
require.NoError(t, err)
require.True(t, a.fallbackBroker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())))
require.Equal(t, path, a.fallbackName)
threshold, found := a.fallbackBroker.SuccessThresholdSinks(eventlogger.EventType(event.AuditType.String()))
require.True(t, found)
require.Equal(t, 1, threshold)
err = a.Deregister(context.Background(), path)
require.NoError(t, err)
require.False(t, a.fallbackBroker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())))
require.Equal(t, "", a.fallbackName)
threshold, found = a.fallbackBroker.SuccessThresholdSinks(eventlogger.EventType(event.AuditType.String()))
require.True(t, found)
require.Equal(t, 0, threshold)
}
// TestAuditBroker_Deregister_Multiple ensures that we can call deregister multiple
// times without issue if there is no matching backend registered.
func TestAuditBroker_Deregister_Multiple(t *testing.T) {
t.Parallel()
l := corehelpers.NewTestLogger(t)
a, err := NewAuditBroker(l, true)
require.NoError(t, err)
require.NotNil(t, a)
err = a.Deregister(context.Background(), "foo")
require.NoError(t, err)
err = a.Deregister(context.Background(), "foo2")
require.NoError(t, err)
}
// TestAuditBroker_Register_MultipleFails checks for failure when we try to
// re-register an audit backend.
func TestAuditBroker_Register_MultipleFails(t *testing.T) {
t.Parallel()
l := corehelpers.NewTestLogger(t)
a, err := NewAuditBroker(l, true)
require.NoError(t, err)
require.NotNil(t, a)
path := "b2-no-filter"
noFilterBackend := testAuditBackend(t, path, map[string]string{})
err = a.Register(path, noFilterBackend, false)
require.NoError(t, err)
err = a.Register(path, noFilterBackend, false)
require.Error(t, err)
require.EqualError(t, err, "vault.(AuditBroker).Register: backend already registered 'b2-no-filter'")
}

View File

@@ -237,6 +237,69 @@ func TestCore_EnableAudit_Local(t *testing.T) {
}
}
// TestAudit_enableAudit_fallback_invalid ensures that supplying a bad value for
// 'fallback' in options gives us the correct error.
func TestAudit_enableAudit_fallback_invalid(t *testing.T) {
entry := &MountEntry{
Path: "noop/",
Options: map[string]string{
"fallback": "juan",
},
}
cluster := NewTestCluster(t, nil, nil)
cluster.Start()
defer cluster.Cleanup()
core := cluster.Cores[0]
core.auditBackends["noop"] = corehelpers.NoopAuditFactory(nil)
err := core.enableAudit(context.Background(), entry, false)
require.Error(t, err)
require.EqualError(t, err, "unable to enable audit device 'noop/', cannot parse supplied 'fallback' setting: cannot parse '' as bool: strconv.ParseBool: parsing \"juan\": invalid syntax")
}
// TestAudit_enableAudit_fallback_two ensures trying to enable a second fallback
// device returns the correct error.
func TestAudit_enableAudit_fallback_two(t *testing.T) {
entry1 := &MountEntry{
Table: auditTableType,
Path: "noop1/",
Type: "noop",
UUID: "abcd",
Accessor: "noop1-abcd",
NamespaceID: namespace.RootNamespaceID,
Options: map[string]string{
"fallback": "TRUE",
},
namespace: namespace.RootNamespace,
}
entry2 := &MountEntry{
Table: auditTableType,
Path: "noop2/",
Type: "noop",
UUID: "abcd",
Accessor: "noop2-abcd",
NamespaceID: namespace.RootNamespaceID,
Options: map[string]string{
"fallback": "1",
},
namespace: namespace.RootNamespace,
}
cluster := NewTestCluster(t, nil, nil)
cluster.Start()
defer cluster.Cleanup()
core := cluster.Cores[0]
core.auditBackends["noop"] = corehelpers.NoopAuditFactory(nil)
ctx := namespace.ContextWithNamespace(context.Background(), namespace.RootNamespace)
err := core.enableAudit(ctx, entry1, false)
require.NoError(t, err)
err = core.enableAudit(ctx, entry2, false)
require.Error(t, err)
require.EqualError(t, err, "unable to enable audit device 'noop2/', a fallback device already exists 'noop1/'")
}
func TestCore_DisableAudit(t *testing.T) {
c, keys, _ := TestCoreUnsealed(t)
c.auditBackends["noop"] = corehelpers.NoopAuditFactory(nil)

View File

@@ -270,7 +270,8 @@ func (cl *Listener) TLSConfig(ctx context.Context) (*tls.Config, error) {
}
// Run starts the tcp listeners and will accept connections until stop is
// called. This function blocks so should be called in a goroutine.
// called. This function does not block and will start the listeners in
// separate goroutines.
func (cl *Listener) Run(ctx context.Context) error {
// Get our TLS config
tlsConfig, err := cl.TLSConfig(ctx)

View File

@@ -526,7 +526,7 @@ type Core struct {
// uiConfig contains UI configuration
uiConfig *UIConfig
customMessageManager *uicustommessages.Manager
customMessageManager CustomMessagesManager
// rawEnabled indicates whether the Raw endpoint is enabled
rawEnabled bool

View File

@@ -0,0 +1,20 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package vault
import (
"context"
uicustommessages "github.com/hashicorp/vault/vault/ui_custom_messages"
)
// CustomMessagesManager is the interface used by the vault package when
// interacting with a uicustommessages.Manager instance.
type CustomMessagesManager interface {
FindMessages(context.Context, uicustommessages.FindFilter) ([]uicustommessages.Message, error)
AddMessage(context.Context, uicustommessages.Message) (*uicustommessages.Message, error)
ReadMessage(context.Context, string) (*uicustommessages.Message, error)
UpdateMessage(context.Context, uicustommessages.Message) (*uicustommessages.Message, error)
DeleteMessage(context.Context, string) error
}

View File

@@ -560,6 +560,9 @@ func testSystemBackend_PluginReload(t *testing.T, reqData map[string]interface{}
if resp.Data["reload_id"] == nil {
t.Fatal("no reload_id in response")
}
if len(resp.Warnings) != 0 {
t.Fatal(resp.Warnings)
}
for i := 0; i < 2; i++ {
// Ensure internal backed value is reset
@@ -578,6 +581,35 @@ func testSystemBackend_PluginReload(t *testing.T, reqData map[string]interface{}
}
}
func TestSystemBackend_PluginReload_WarningIfNoneReloaded(t *testing.T) {
cluster := testSystemBackendMock(t, 1, 2, logical.TypeLogical, "v5")
defer cluster.Cleanup()
core := cluster.Cores[0]
client := core.Client
for _, backendType := range []logical.BackendType{logical.TypeLogical, logical.TypeCredential} {
t.Run(backendType.String(), func(t *testing.T) {
// Perform plugin reload
resp, err := client.Logical().Write("sys/plugins/reload/backend", map[string]any{
"plugin": "does-not-exist",
})
if err != nil {
t.Fatalf("err: %v", err)
}
if resp == nil {
t.Fatalf("bad: %v", resp)
}
if resp.Data["reload_id"] == nil {
t.Fatal("no reload_id in response")
}
if len(resp.Warnings) == 0 {
t.Fatal("expected warning")
}
})
}
}
// testSystemBackendMock returns a systemBackend with the desired number
// of mounted mock plugin backends. numMounts alternates between different
// ways of providing the plugin_name.

View File

@@ -50,6 +50,7 @@ import (
"github.com/hashicorp/vault/sdk/helper/wrapping"
"github.com/hashicorp/vault/sdk/logical"
"github.com/hashicorp/vault/vault/plugincatalog"
uicustommessages "github.com/hashicorp/vault/vault/ui_custom_messages"
"github.com/hashicorp/vault/version"
"github.com/mitchellh/mapstructure"
"golang.org/x/crypto/sha3"
@@ -142,8 +143,8 @@ func NewSystemBackend(core *Core, logger log.Logger, config *logical.BackendConf
"wrapping/pubkey",
"replication/status",
"internal/specs/openapi",
"internal/ui/custom-messages",
"internal/ui/custom-messages/*",
"internal/ui/authenticated-messages",
"internal/ui/unauthenticated-messages",
"internal/ui/mounts",
"internal/ui/mounts/*",
"internal/ui/namespaces",
@@ -737,11 +738,24 @@ func (b *SystemBackend) handlePluginReloadUpdate(ctx context.Context, req *logic
return logical.ErrorResponse("plugin or mounts must be provided"), nil
}
resp := logical.Response{
Data: map[string]interface{}{
"reload_id": req.ID,
},
}
if pluginName != "" {
err := b.Core.reloadMatchingPlugin(ctx, pluginName)
reloaded, err := b.Core.reloadMatchingPlugin(ctx, pluginName)
if err != nil {
return nil, err
}
if reloaded == 0 {
if scope == globalScope {
resp.AddWarning("no plugins were reloaded locally (but they may be reloaded on other nodes)")
} else {
resp.AddWarning("no plugins were reloaded")
}
}
} else if len(pluginMounts) > 0 {
err := b.Core.reloadMatchingPluginMounts(ctx, pluginMounts)
if err != nil {
@@ -749,20 +763,14 @@ func (b *SystemBackend) handlePluginReloadUpdate(ctx context.Context, req *logic
}
}
r := logical.Response{
Data: map[string]interface{}{
"reload_id": req.ID,
},
}
if scope == globalScope {
err := handleGlobalPluginReload(ctx, b.Core, req.ID, pluginName, pluginMounts)
if err != nil {
return nil, err
}
return logical.RespondWithStatusCode(&r, req, http.StatusAccepted)
return logical.RespondWithStatusCode(&resp, req, http.StatusAccepted)
}
return &r, nil
return &resp, nil
}
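A hedged usage sketch of the behaviour added above (the plugin name is hypothetical): reloading a plugin through the API now surfaces a warning when nothing was reloaded, instead of an indistinguishable success.

package main

import (
	"fmt"
	"log"

	vault "github.com/hashicorp/vault/api"
)

func main() {
	client, err := vault.NewClient(vault.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	resp, err := client.Logical().Write("sys/plugins/reload/backend", map[string]interface{}{
		"plugin": "my-secrets-plugin", // hypothetical catalog plugin name
	})
	if err != nil {
		log.Fatal(err)
	}
	if resp != nil {
		fmt.Println("reload_id:", resp.Data["reload_id"])
		// With this change, a reload that matched no mounted plugins carries a
		// warning such as "no plugins were reloaded".
		for _, w := range resp.Warnings {
			fmt.Println("warning:", w)
		}
	}
}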
func (b *SystemBackend) handlePluginRuntimeCatalogUpdate(ctx context.Context, _ *logical.Request, d *framework.FieldData) (*logical.Response, error) {
@@ -4428,6 +4436,87 @@ func hasMountAccess(ctx context.Context, acl *ACL, path string) bool {
return aclCapabilitiesGiven
}
// pathInternalUIAuthenticatedMessages finds all of the active messages whose
// Authenticated property is set to true in the current namespace (based on the
// provided context.Context) or in any ancestor namespace all the way up to the
// root namespace.
func (b *SystemBackend) pathInternalUIAuthenticatedMessages(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) {
// Make sure that the request includes a Vault token.
var tokenEntry *logical.TokenEntry
if token := req.ClientToken; token != "" {
tokenEntry, _ = b.Core.LookupToken(ctx, token)
}
if tokenEntry == nil {
return logical.ListResponseWithInfo([]string{}, map[string]any{}), nil
}
filter := uicustommessages.FindFilter{
IncludeAncestors: true,
}
filter.Active(true)
filter.Authenticated(true)
return b.pathInternalUICustomMessagesCommon(ctx, filter)
}
// pathInternalUIUnauthenticatedMessages finds all of the active messages whose
// Authenticated property is set to false in the current namespace (based on the
// provided context.Context) or in any ancestor namespace all the way up to the
// root namespace.
func (b *SystemBackend) pathInternalUIUnauthenticatedMessages(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) {
filter := uicustommessages.FindFilter{
IncludeAncestors: true,
}
filter.Active(true)
filter.Authenticated(false)
return b.pathInternalUICustomMessagesCommon(ctx, filter)
}
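A hedged sketch of how a client could call the two endpoints these handlers back, assuming the patterns registered elsewhere in this change surface as sys/internal/ui/... API paths in the usual way; the token value is a placeholder.

package main

import (
	"fmt"
	"log"

	vault "github.com/hashicorp/vault/api"
)

func main() {
	client, err := vault.NewClient(vault.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	// Pre-login messages: the handler does not require a token.
	unauth, err := client.Logical().Read("sys/internal/ui/unauthenticated-messages")
	if err != nil {
		log.Fatal(err)
	}
	if unauth != nil {
		fmt.Println("unauthenticated keys:", unauth.Data["keys"])
	}

	// Post-login messages: the handler looks up the request token and returns an
	// empty list when the token is missing or invalid.
	client.SetToken("hvs.placeholder") // hypothetical token
	auth, err := client.Logical().Read("sys/internal/ui/authenticated-messages")
	if err != nil {
		log.Fatal(err)
	}
	if auth != nil {
		fmt.Println("authenticated keys:", auth.Data["keys"])
	}
}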
// pathInternalUICustomMessagesCommon takes care of finding the custom messages
// that meet the criteria set in the provided uicustommessages.FindFilter.
func (b *SystemBackend) pathInternalUICustomMessagesCommon(ctx context.Context, filter uicustommessages.FindFilter) (*logical.Response, error) {
messages, err := b.Core.customMessageManager.FindMessages(ctx, filter)
if err != nil {
return logical.ErrorResponse("failed to retrieve custom messages: %s", err), nil
}
keys := []string{}
keyInfo := map[string]any{}
for _, message := range messages {
keys = append(keys, message.ID)
var endTimeFormatted any
if message.EndTime != nil {
endTimeFormatted = message.EndTime.Format(time.RFC3339Nano)
}
var linkFormatted map[string]string = nil
if message.Link != nil {
linkFormatted = make(map[string]string)
linkFormatted[message.Link.Title] = message.Link.Href
}
keyInfo[message.ID] = map[string]any{
"title": message.Title,
"message": message.Message,
"authenticated": message.Authenticated,
"type": message.Type,
"start_time": message.StartTime.Format(time.RFC3339Nano),
"end_time": endTimeFormatted,
"link": linkFormatted,
"options": message.Options,
}
}
return logical.ListResponseWithInfo(keys, keyInfo), nil
}
func (b *SystemBackend) pathInternalUIMountsRead(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) {
ns, err := namespace.FromContext(ctx)
if err != nil {

View File

@@ -422,28 +422,24 @@ func (b *SystemBackend) handleCreateCustomMessages(ctx context.Context, req *log
return logical.ErrorResponse(err.Error()), nil
}
if len(linkMap) > 1 {
return logical.ErrorResponse("invalid number of elements in link parameter value; only a single element can be provided"), nil
}
var link *uicustommessages.MessageLink
if linkMap != nil {
link = &uicustommessages.MessageLink{}
linkTitle, ok := linkMap["title"]
if !ok {
return logical.ErrorResponse("missing title in link parameter value"), nil
}
for k, v := range linkMap {
href, ok := v.(string)
if !ok {
return logical.ErrorResponse(fmt.Sprintf("invalid url for %q key in link parameter value", k)), nil
}
link.Title, ok = linkTitle.(string)
if !ok {
return logical.ErrorResponse("invalid title value in link parameter value"), nil
}
link.Title = k
link.Href = href
linkHref, ok := linkMap["href"]
if !ok {
return logical.ErrorResponse("missing href in link parameter value"), nil
}
link.Href, ok = linkHref.(string)
if !ok {
return logical.ErrorResponse("invalid href value in link parameter value"), nil
break
}
}
@@ -509,6 +505,13 @@ func (b *SystemBackend) handleReadCustomMessage(ctx context.Context, req *logica
endTimeResponse = message.EndTime.Format(time.RFC3339Nano)
}
var linkResponse map[string]string = nil
if message.Link != nil {
linkResponse = make(map[string]string)
linkResponse[message.Link.Title] = message.Link.Href
}
return &logical.Response{
Data: map[string]any{
"id": id,
@@ -517,7 +520,7 @@ func (b *SystemBackend) handleReadCustomMessage(ctx context.Context, req *logica
"message": message.Message,
"start_time": message.StartTime.Format(time.RFC3339Nano),
"end_time": endTimeResponse,
"link": message.Link,
"link": linkResponse,
"options": message.Options,
"active": message.Active(),
"title": message.Title,
@@ -558,28 +561,24 @@ func (b *SystemBackend) handleUpdateCustomMessage(ctx context.Context, req *logi
return logical.ErrorResponse(err.Error()), nil
}
if len(linkMap) > 1 {
return logical.ErrorResponse("invalid number of elements in link parameter value; only a single element can be provided"), nil
}
var link *uicustommessages.MessageLink
if linkMap != nil {
link = &uicustommessages.MessageLink{}
linkTitle, ok := linkMap["title"]
if !ok {
return logical.ErrorResponse("missing title in link parameter value"), nil
}
for k, v := range linkMap {
href, ok := v.(string)
if !ok {
return logical.ErrorResponse("invalid url for %q key in link parameter value", k), nil
}
link.Title, ok = linkTitle.(string)
if !ok {
return logical.ErrorResponse("invalid title value in link parameter value"), nil
}
link.Title = k
link.Href = href
linkHref, ok := linkMap["href"]
if !ok {
return logical.ErrorResponse("missing href in link parameter value"), nil
}
link.Href, ok = linkHref.(string)
if !ok {
return logical.ErrorResponse("invalid href value in link parameter value"), nil
break
}
}
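To illustrate the new shape of the link parameter handled above, a hedged request sketch: after this change "link" is a single key/value pair (title to URL) rather than separate "title" and "href" fields. The endpoint path and the other field values are assumptions for illustration.

package main

import (
	"log"

	vault "github.com/hashicorp/vault/api"
)

func main() {
	client, err := vault.NewClient(vault.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	_, err = client.Logical().Write("sys/config/ui/custom-messages", map[string]interface{}{
		"title":      "Scheduled maintenance",              // hypothetical
		"message":    "Vault will be read-only on Sunday.", // hypothetical
		"start_time": "2024-02-01T00:00:00Z",
		// One element only; supplying more than one link is rejected.
		"link": map[string]interface{}{
			"Status page": "https://status.example.com",
		},
	})
	if err != nil {
		log.Fatal(err)
	}
}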

View File

@@ -302,6 +302,25 @@ func TestHandleCreateCustomMessage(t *testing.T) {
},
errorExpected: true,
},
{
name: "link-parameter-href-invalid",
fieldRawUpdate: map[string]any{
"link": map[string]any{
"click here": []int{},
},
},
errorExpected: true,
},
{
name: "link-parameter-multiple-links",
fieldRawUpdate: map[string]any{
"link": map[string]any{
"click here": "http://example.org",
"click here 2": "http://ping.net",
},
},
errorExpected: true,
},
{
name: "options-parameter-invalid",
fieldRawUpdate: map[string]any{
@@ -321,9 +340,8 @@ func TestHandleCreateCustomMessage(t *testing.T) {
"options": map[string]any{
"color": "red",
},
"link": map[string]any{
"title": "Details",
"href": "https://server.com/details",
"link": map[string]string{
"Details": "https://server.com/details",
},
},
},
@@ -373,14 +391,24 @@ func TestHandleCreateCustomMessage(t *testing.T) {
assert.Contains(t, resp.Data, "start_time", testcase.name)
assert.Contains(t, resp.Data, "end_time", testcase.name)
assert.Contains(t, resp.Data, "id", testcase.name)
if _, ok := testcase.fieldRawUpdate["authenticated"]; !ok {
assert.True(t, resp.Data["authenticated"].(bool), testcase.name)
}
assert.Contains(t, resp.Data, "options", testcase.name)
assert.Contains(t, resp.Data, "link", testcase.name)
_, ok := testcase.fieldRawUpdate["authenticated"]
assert.Equal(t, !ok, resp.Data["authenticated"].(bool), testcase.name)
if _, ok := testcase.fieldRawUpdate["type"]; !ok {
assert.Equal(t, resp.Data["type"], uicustommessages.BannerMessageType, testcase.name)
} else {
assert.Equal(t, resp.Data["type"], uicustommessages.ModalMessageType, testcase.name)
}
if _, ok := testcase.fieldRawUpdate["end_time"]; !ok {
assert.Nil(t, resp.Data["end_time"], testcase.name)
} else {
assert.NotNil(t, resp.Data["end_time"], testcase.name)
}
if _, ok := testcase.fieldRawUpdate["link"]; !ok {
assert.Nil(t, resp.Data["link"], testcase.name)
} else {
assert.NotNil(t, resp.Data["link"], testcase.name)
}
}
}
@@ -428,7 +456,10 @@ func TestHandleReadCustomMessage(t *testing.T) {
StartTime: earlier,
EndTime: &later,
Options: make(map[string]any),
Link: nil,
Link: &uicustommessages.MessageLink{
Title: "Click Here",
Href: "www.example.com",
},
}
message, err := backend.Core.customMessageManager.AddMessage(nsCtx, *message)
@@ -457,9 +488,12 @@ func TestHandleReadCustomMessage(t *testing.T) {
assert.Equal(t, resp.Data["active"], true)
assert.Contains(t, resp.Data, "end_time")
assert.NotNil(t, resp.Data["end_time"])
assert.Contains(t, resp.Data, "link")
assert.Equal(t, 1, len(resp.Data["link"].(map[string]string)))
// Change the message so that it doesn't have an end time or a link.
message.EndTime = nil
message.Link = nil
message, err = backend.Core.customMessageManager.UpdateMessage(nsCtx, *message)
require.NoError(t, err)
require.NotNil(t, message)
@@ -474,6 +508,8 @@ func TestHandleReadCustomMessage(t *testing.T) {
assert.Equal(t, resp.Data["active"], true)
assert.Contains(t, resp.Data, "end_time")
assert.Nil(t, resp.Data["end_time"])
assert.Contains(t, resp.Data, "link")
assert.Nil(t, resp.Data["link"])
// Check that there's an error when trying to read a non-existent custom
// message.
@@ -538,7 +574,7 @@ func TestHandleUpdateCustomMessage(t *testing.T) {
endTime := now.Add(time.Hour).Format(time.RFC3339Nano)
startTime2 := now.UTC().Add(-2 * time.Hour).Format(time.RFC3339Nano)
storageEntryValue := fmt.Sprintf(`{"messages":{"xyz":{"id":"xyz","title":"title","message":"message","authenticated":true,"type":"%s","start_time":"%s","end_time":"%s","link":{},"options":{}}}}`, uicustommessages.ModalMessageType, startTime, endTime)
storageEntryValue := fmt.Sprintf(`{"messages":{"xyz":{"id":"xyz","title":"title","message":"message","authenticated":true,"type":"%s","start_time":"%s","end_time":"%s","link":null,"options":null}}}`, uicustommessages.ModalMessageType, startTime, endTime)
storageEntry := &logical.StorageEntry{
Key: "sys/config/ui/custom-messages",
@@ -595,8 +631,7 @@ func TestHandleUpdateCustomMessage(t *testing.T) {
"start_time": startTime,
"end_time": endTime,
"link": map[string]any{
"title": "link-title",
"href": "http://link.url.com",
"link-title": "http://link.url.com",
},
"options": map[string]any{},
},
@@ -704,6 +739,23 @@ func TestHandleUpdateCustomMessage(t *testing.T) {
"link": "link",
},
},
{
name: "link-parameter-url-invalid",
fieldRawUpdate: map[string]any{
"link": map[string]any{
"my-link": []int{},
},
},
},
{
name: "link-parameter-multiple-links",
fieldRawUpdate: map[string]any{
"link": map[string]any{
"click here": "http://example.org",
"click here 2": "http://ping.net",
},
},
},
{
name: "options-parameter-invalid",
fieldRawUpdate: map[string]any{

View File

@@ -2435,6 +2435,37 @@ func (b *SystemBackend) internalPaths() []*framework.Path {
HelpSynopsis: "Generate an OpenAPI 3 document of all mounted paths.",
},
{
Pattern: "internal/ui/authenticated-messages",
DisplayAttrs: &framework.DisplayAttributes{
OperationPrefix: "internal-ui",
OperationVerb: "read",
OperationSuffix: "authenticated-active-custom-messages",
},
Operations: map[logical.Operation]framework.OperationHandler{
logical.ReadOperation: &framework.PathOperation{
Callback: b.pathInternalUIAuthenticatedMessages,
Summary: "Retrieves Active post-login Custom Messages",
Responses: map[int][]framework.Response{
http.StatusOK: {{
Description: "OK",
Fields: map[string]*framework.FieldSchema{
"keys": {
Type: framework.TypeStringSlice,
Required: true,
},
"key_info": {
Type: framework.TypeMap,
Required: true,
},
},
}},
},
},
},
},
{
Pattern: "internal/ui/feature-flags",
@@ -2653,6 +2684,37 @@ func (b *SystemBackend) internalPaths() []*framework.Path {
HelpSynopsis: strings.TrimSpace(sysHelp["internal-ui-resultant-acl"][0]),
HelpDescription: strings.TrimSpace(sysHelp["internal-ui-resultant-acl"][1]),
},
{
Pattern: "internal/ui/unauthenticated-messages",
DisplayAttrs: &framework.DisplayAttributes{
OperationPrefix: "internal-ui",
OperationVerb: "read",
OperationSuffix: "unauthenticated-active-custom-messages",
},
Operations: map[logical.Operation]framework.OperationHandler{
logical.ReadOperation: &framework.PathOperation{
Callback: b.pathInternalUIUnauthenticatedMessages,
Summary: "Retrieves Active pre-login Custom Messages",
Responses: map[int][]framework.Response{
http.StatusOK: {{
Description: "OK",
Fields: map[string]*framework.FieldSchema{
"keys": {
Type: framework.TypeStringSlice,
Required: true,
},
"key_info": {
Type: framework.TypeMap,
Required: true,
},
},
}},
},
},
},
},
{
Pattern: "internal/ui/version",
DisplayAttrs: &framework.DisplayAttributes{

View File

@@ -44,8 +44,10 @@ import (
"github.com/hashicorp/vault/sdk/logical"
"github.com/hashicorp/vault/vault/plugincatalog"
"github.com/hashicorp/vault/vault/seal"
uicustommessages "github.com/hashicorp/vault/vault/ui_custom_messages"
"github.com/hashicorp/vault/version"
"github.com/mitchellh/mapstructure"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -6383,3 +6385,166 @@ func TestSystemBackend_pluginRuntime_CannotDeleteRuntimeWithReferencingPlugins(t
t.Fatalf("err: %v %v", err, resp.Error())
}
}
type testingCustomMessageManager struct {
findFilters []uicustommessages.FindFilter
}
func (m *testingCustomMessageManager) FindMessages(_ context.Context, filter uicustommessages.FindFilter) ([]uicustommessages.Message, error) {
m.findFilters = append(m.findFilters, filter)
return []uicustommessages.Message{}, nil
}
func (m *testingCustomMessageManager) AddMessage(_ context.Context, _ uicustommessages.Message) (*uicustommessages.Message, error) {
return nil, nil
}
func (m *testingCustomMessageManager) ReadMessage(_ context.Context, _ string) (*uicustommessages.Message, error) {
return nil, nil
}
func (m *testingCustomMessageManager) UpdateMessage(_ context.Context, _ uicustommessages.Message) (*uicustommessages.Message, error) {
return nil, nil
}
func (m *testingCustomMessageManager) DeleteMessage(_ context.Context, _ string) error {
return nil
}
// TestPathInternalUIUnauthenticatedMessages verifies the correct behaviour of
// the pathInternalUIUnauthenticatedMessages method, which is to call the
// FindMessages method of the Core.customMessagesManager field with a FindFilter
// that has the IncludeAncestors field set to true, the active field pointing to
// a true value, and the authenticated field pointing to a false value.
func TestPathInternalUIUnauthenticatedMessages(t *testing.T) {
testingCMM := &testingCustomMessageManager{}
backend := &SystemBackend{
Core: &Core{
customMessageManager: testingCMM,
},
}
resp, err := backend.pathInternalUIUnauthenticatedMessages(context.Background(), &logical.Request{}, &framework.FieldData{})
assert.NoError(t, err)
assert.NotNil(t, resp)
expectedFilter := uicustommessages.FindFilter{IncludeAncestors: true}
expectedFilter.Active(true)
expectedFilter.Authenticated(false)
assert.ElementsMatch(t, testingCMM.findFilters, []uicustommessages.FindFilter{expectedFilter})
}
// TestPathInternalUIAuthenticatedMessages verifies the correct behaviour of the
// pathInternalUIAuthenticatedMessages method, which is to first check if the
// request has a valid token included, then call the FindMessages method of the
// Core.customMessagesManager field with a FindFilter that has the
// IncludeAncestors field set to true, the active field pointing to a true
// value, and the authenticated field pointing to a true value. If the request
// does not have a valid token, the method behaves as if no messages meet the
// criteria.
func TestPathInternalUIAuthenticatedMessages(t *testing.T) {
testingCMM := &testingCustomMessageManager{}
testCore := TestCoreRaw(t)
_, _, token := testCoreUnsealed(t, testCore)
testCore.customMessageManager = testingCMM
backend := &SystemBackend{
Core: testCore,
}
nsCtx := namespace.ContextWithNamespace(context.Background(), namespace.RootNamespace)
// Check with a request that includes a valid token
resp, err := backend.pathInternalUIAuthenticatedMessages(nsCtx, &logical.Request{
ClientToken: token,
}, &framework.FieldData{})
assert.NoError(t, err)
assert.NotNil(t, resp)
expectedFilter := uicustommessages.FindFilter{
IncludeAncestors: true,
}
expectedFilter.Active(true)
expectedFilter.Authenticated(true)
assert.ElementsMatch(t, testingCMM.findFilters, []uicustommessages.FindFilter{expectedFilter})
// Now, check with a request that has no token: expecting no new filter
// in the testingCMM.
resp, err = backend.pathInternalUIAuthenticatedMessages(nsCtx, &logical.Request{}, &framework.FieldData{})
assert.NoError(t, err)
assert.NotNil(t, resp)
assert.NotContains(t, resp.Data, "keys")
assert.NotContains(t, resp.Data, "key_info")
assert.ElementsMatch(t, testingCMM.findFilters, []uicustommessages.FindFilter{expectedFilter})
// Finally, check with an invalid token in the request: again, expecting no
// new filter in the testingCMM.
resp, err = backend.pathInternalUIAuthenticatedMessages(nsCtx, &logical.Request{ClientToken: "invalid"}, &framework.FieldData{})
assert.NoError(t, err)
assert.NotNil(t, resp)
assert.NotContains(t, resp.Data, "keys")
assert.NotContains(t, resp.Data, "key_info")
assert.ElementsMatch(t, testingCMM.findFilters, []uicustommessages.FindFilter{expectedFilter})
}
// TestPathInternalUICustomMessagesCommon verifies the correct behaviour of the
// (*SystemBackend).pathInternalUICustomMessagesCommon method.
func TestPathInternalUICustomMessagesCommon(t *testing.T) {
var storage logical.Storage = &testingStorage{getFails: true}
testingCMM := uicustommessages.NewManager(storage)
backend := &SystemBackend{
Core: &Core{
customMessageManager: testingCMM,
},
}
// First, check that when an error occurs in the FindMessages method, it's
// handled correctly.
filter := uicustommessages.FindFilter{
IncludeAncestors: true,
}
filter.Active(true)
filter.Authenticated(false)
resp, err := backend.pathInternalUICustomMessagesCommon(context.Background(), filter)
assert.NoError(t, err)
assert.NotNil(t, resp)
assert.Contains(t, resp.Data, "error")
assert.Contains(t, resp.Data["error"], "failed to retrieve custom messages")
// Next, check that when no errors occur and messages are returned by
// FindMessages that they are correctly translated.
storage = &logical.InmemStorage{}
backend.Core.customMessageManager = uicustommessages.NewManager(storage)
// Load some messages for the root namespace and a testNS namespace.
startTime := time.Now().Add(-1 * time.Hour).Format(time.RFC3339Nano)
endTime := time.Now().Add(time.Hour).Format(time.RFC3339Nano)
messagesTemplate := `{"messages":{"%[1]d01":{"id":"%[1]d01","title":"Title-%[1]d01","message":"Message of Title-%[1]d01","authenticated":false,"type":"banner","start_time":"%[2]s"},"%[1]d02":{"id":"%[1]d02","title":"Title-%[1]d02","message":"Message of Title-%[1]d02","authenticated":false,"type":"modal","start_time":"%[2]s","end_time":"%[3]s"},"%[1]d03":{"id":"%[1]d03","title":"Title-%[1]d03","message":"Message of Title-%[1]d03","authenticated":false,"type":"banner","start_time":"%[2]s","link":{"Link":"www.example.com"}}}}`
cmStorageEntry := &logical.StorageEntry{
Key: "sys/config/ui/custom-messages",
Value: []byte(fmt.Sprintf(messagesTemplate, 0, startTime, endTime)),
}
storage.Put(context.Background(), cmStorageEntry)
cmStorageEntry = &logical.StorageEntry{
Key: "namespaces/testNS/sys/config/ui/custom-messages",
Value: []byte(fmt.Sprintf(messagesTemplate, 1, startTime, endTime)),
}
storage.Put(context.Background(), cmStorageEntry)
resp, err = backend.pathInternalUICustomMessagesCommon(namespace.ContextWithNamespace(context.Background(), namespace.RootNamespace), filter)
assert.NoError(t, err)
assert.NotNil(t, resp)
assert.Contains(t, resp.Data, "keys")
assert.Equal(t, 3, len(resp.Data["keys"].([]string)))
assert.Contains(t, resp.Data, "key_info")
assert.Equal(t, 3, len(resp.Data["key_info"].(map[string]any)))
}

View File

@@ -322,7 +322,7 @@ const mountStateUnmounting = "unmounting"
// MountEntry is used to represent a mount table entry
type MountEntry struct {
Table string `json:"table"` // The table it belongs to
Path string `json:"path"` // Mount Path
Path string `json:"path"` // Mount Path, as provided in the mount API call but with a trailing slash; it does not include the auth/ or namespace prefix.
Type string `json:"type"` // Logical backend Type. NB: This is the plugin name, e.g. my-vault-plugin, NOT plugin type (e.g. auth).
Description string `json:"description"` // User-provided description
UUID string `json:"uuid"` // Barrier view UUID

View File

@@ -70,10 +70,10 @@ func (c *Core) reloadMatchingPluginMounts(ctx context.Context, mounts []string)
return errors
}
// reloadPlugin reloads all mounted backends that are of
// plugin pluginName (name of the plugin as registered in
// the plugin catalog).
func (c *Core) reloadMatchingPlugin(ctx context.Context, pluginName string) error {
// reloadMatchingPlugin reloads all mounted backends that are named pluginName
// (name of the plugin as registered in the plugin catalog). It returns the
// number of plugins that were reloaded and an error if any.
func (c *Core) reloadMatchingPlugin(ctx context.Context, pluginName string) (reloaded int, err error) {
c.mountsLock.RLock()
defer c.mountsLock.RUnlock()
c.authLock.RLock()
@@ -81,25 +81,49 @@ func (c *Core) reloadMatchingPlugin(ctx context.Context, pluginName string) erro
ns, err := namespace.FromContext(ctx)
if err != nil {
return err
return reloaded, err
}
// Filter mount entries to only those that match the plugin name
for _, entry := range c.mounts.Entries {
// We don't reload mounts that are not in the same namespace
if ns.ID != entry.Namespace().ID {
continue
}
if entry.Type == pluginName || (entry.Type == "plugin" && entry.Config.PluginName == pluginName) {
err := c.reloadBackendCommon(ctx, entry, false)
if err != nil {
return err
return reloaded, err
}
reloaded++
c.logger.Info("successfully reloaded plugin", "plugin", pluginName, "namespace", entry.Namespace(), "path", entry.Path, "version", entry.Version)
} else if entry.Type == "database" {
// The combined database plugin is itself a secrets engine, but
// knowledge of whether a database plugin is in use within a particular
// mount is internal to the combined database plugin's storage, so
// we delegate the reload request with an internally routed request.
req := &logical.Request{
Operation: logical.UpdateOperation,
Path: entry.Path + "reload/" + pluginName,
}
resp, err := c.router.Route(ctx, req)
if err != nil {
return reloaded, err
}
if resp == nil {
return reloaded, fmt.Errorf("failed to reload %q database plugin(s) mounted under %s", pluginName, entry.Path)
}
if resp.IsError() {
return reloaded, fmt.Errorf("failed to reload %q database plugin(s) mounted under %s: %s", pluginName, entry.Path, resp.Error())
}
if count, ok := resp.Data["count"].(int); ok && count > 0 {
c.logger.Info("successfully reloaded database plugin(s)", "plugin", pluginName, "namespace", entry.Namespace(), "path", entry.Path, "connections", resp.Data["connections"])
reloaded += count
}
c.logger.Info("successfully reloaded plugin", "plugin", pluginName, "path", entry.Path, "version", entry.Version)
}
}
// Filter auth mount entries to only those that match the plugin name
for _, entry := range c.auth.Entries {
// We don't reload mounts that are not in the same namespace
if ns.ID != entry.Namespace().ID {
@@ -109,13 +133,14 @@ func (c *Core) reloadMatchingPlugin(ctx context.Context, pluginName string) erro
if entry.Type == pluginName || (entry.Type == "plugin" && entry.Config.PluginName == pluginName) {
err := c.reloadBackendCommon(ctx, entry, true)
if err != nil {
return err
return reloaded, err
}
reloaded++
c.logger.Info("successfully reloaded plugin", "plugin", entry.Accessor, "path", entry.Path, "version", entry.Version)
}
}
return nil
return reloaded, nil
}
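The "database" branch above delegates to the combined database plugin's own reload endpoint via an internally routed request. A hedged sketch of the equivalent operator-facing call, assuming a database secrets engine mounted at "database/" and a hypothetical plugin name:

package main

import (
	"fmt"
	"log"

	vault "github.com/hashicorp/vault/api"
)

func main() {
	client, err := vault.NewClient(vault.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	// Internally, reloadMatchingPlugin routes an UpdateOperation to
	// "<mount path>reload/<plugin name>" for every database mount it visits.
	resp, err := client.Logical().Write("database/reload/postgresql-database-plugin", nil)
	if err != nil {
		log.Fatal(err)
	}
	if resp != nil {
		fmt.Println("reloaded connections:", resp.Data["connections"])
	}
}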
// reloadBackendCommon is a generic method to reload a backend provided a

View File

@@ -26,6 +26,12 @@ const (
MaximumMessageCountPerNamespace int = 100
)
// nsManager is the NamespaceManager instance used to determine the set of
// Namespaces to consider when retrieving active Custom Messages. This
// variable is re-assigned to point to a real NamespaceManager in the
// enterprise edition.
var nsManager NamespaceManager = &CommunityEditionNamespaceManager{}
// Manager is a struct that provides methods to manage messages stored in a
// logical.Storage.
type Manager struct {
@@ -223,10 +229,13 @@ func getNamespacesToSearch(ctx context.Context, filters FindFilter) ([]*namespac
// Add the current namespace based on the context.Context to nsList.
nsList = append(nsList, ns)
//if filters.IncludeAncestors {
// Add the parent, grand-parent, etc... namespaces all the way back up
// to the root namespace to nsList.
//}
if filters.IncludeAncestors {
parentNs := nsManager.GetParentNamespace(ns.Path)
for ; parentNs.ID != ns.ID; parentNs = nsManager.GetParentNamespace(ns.Path) {
ns = parentNs
nsList = append(nsList, ns)
}
}
return nsList, nil
}
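A small runnable sketch of the ancestor walk above in isolation, using a stand-in manager that behaves like CommunityEditionNamespaceManager (every parent is the root namespace); the namespace values are hypothetical.

package main

import (
	"fmt"

	"github.com/hashicorp/vault/helper/namespace"
)

// communityManager mirrors the community edition behaviour: every namespace's
// parent is the root namespace.
type communityManager struct{}

func (communityManager) GetParentNamespace(_ string) *namespace.Namespace {
	return namespace.RootNamespace
}

func main() {
	mgr := communityManager{}
	ns := &namespace.Namespace{ID: "abc123", Path: "team-a/"} // hypothetical namespace
	nsList := []*namespace.Namespace{ns}

	// Follow parents until the parent of the current namespace is the namespace
	// itself, which is how the loop above terminates at the root.
	parent := mgr.GetParentNamespace(ns.Path)
	for ; parent.ID != ns.ID; parent = mgr.GetParentNamespace(ns.Path) {
		ns = parent
		nsList = append(nsList, ns)
	}

	for _, n := range nsList {
		fmt.Printf("%s %q\n", n.ID, n.Path) // abc123 "team-a/", then the root namespace
	}
}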

View File

@@ -220,9 +220,41 @@ func TestGetNamespacesToSearch(t *testing.T) {
list, err = getNamespacesToSearch(namespace.ContextWithNamespace(context.Background(), namespace.RootNamespace), FindFilter{})
assert.NoError(t, err)
assert.NotNil(t, list)
assert.Equal(t, 1, len(list))
assert.Len(t, list, 1)
assert.Equal(t, namespace.RootNamespace, list[0])
// Verify with nsManager set to an instance of testNamespaceManager to
// ensure that it is used to calculate the list of namespaces.
currentNsManager := nsManager
defer func() {
nsManager = currentNsManager
}()
nsManager = &testNamespaceManager{
results: []namespace.Namespace{
{
ID: "ccc",
Path: "c/",
},
{
ID: "bbb",
Path: "b/",
},
{
ID: "aaa",
Path: "a/",
},
},
}
list, err = getNamespacesToSearch(namespace.ContextWithNamespace(context.Background(), &namespace.Namespace{ID: "ddd", Path: "d/"}), FindFilter{IncludeAncestors: true})
assert.NoError(t, err)
assert.Len(t, list, 5)
assert.Equal(t, list[0].Path, "d/")
assert.Equal(t, list[1].Path, "c/")
assert.Equal(t, list[2].Path, "b/")
assert.Equal(t, list[3].Path, "a/")
assert.Equal(t, list[4].Path, "")
}
// TestStorageKeyForNamespace verifies that the storageKeyForNamespace function
@@ -633,3 +665,24 @@ func (s *testingStorage) Put(_ context.Context, _ *logical.StorageEntry) error {
return nil
}
// testNamespaceManager is a peculiar type of NamespaceManager where it can be
// instantiated with the results that successive calls to its GetParentNamespace
// method will return.
type testNamespaceManager struct {
results []namespace.Namespace
}
// GetParentNamespace effectively pops namespaces from the results field in the
// receiver testNamespaceManager struct and returns them. Once all namespaces
// have been returned, it returns namespace.RootNamespace.
func (n *testNamespaceManager) GetParentNamespace(_ string) *namespace.Namespace {
if len(n.results) == 0 {
return namespace.RootNamespace
}
ns := n.results[0]
n.results = n.results[1:]
return &ns
}

View File

@@ -0,0 +1,24 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package uicustommessages
import "github.com/hashicorp/vault/helper/namespace"
// NamespaceManager is the interface needed of a NamespaceManager by this
// package. This interface allows setting a dummy NamespaceManager in the
// community edition that can be replaced with the real
// namespace.NamespaceManager in the enterprise edition.
type NamespaceManager interface {
GetParentNamespace(string) *namespace.Namespace
}
// CommunityEditionNamespaceManager is a struct that implements the
// NamespaceManager interface. This struct is used as a placeholder in the
// community edition.
type CommunityEditionNamespaceManager struct{}
// GetParentNamespace always returns namespace.RootNamespace.
func (n *CommunityEditionNamespaceManager) GetParentNamespace(_ string) *namespace.Namespace {
return namespace.RootNamespace
}

View File

@@ -0,0 +1,31 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package uicustommessages
import (
"testing"
"github.com/hashicorp/vault/helper/namespace"
"github.com/stretchr/testify/assert"
)
// TestCommunityEditionNamespaceManagerGetParentNamespace verifies that the
// (*CommunityEditionNamespaceManager).GetParentNamespace behaves as intended,
// which is to always return namespace.RootNamespace, regardless of the input.
func TestCommunityEditionNamespaceManagerGetParentNamespace(t *testing.T) {
testNsManager := &CommunityEditionNamespaceManager{}
// Verify root namespace
assert.Equal(t, namespace.RootNamespace, testNsManager.GetParentNamespace(namespace.RootNamespace.Path))
// Verify a different namespace
testNamespace := namespace.Namespace{
ID: "abc123",
Path: "test/",
}
assert.Equal(t, namespace.RootNamespace, testNsManager.GetParentNamespace(testNamespace.Path))
// Verify that even a random string results in the root namespace
assert.Equal(t, namespace.RootNamespace, testNsManager.GetParentNamespace("blah"))
}

View File

@@ -8,7 +8,7 @@ description: The `/sys/quotas/lease-count` endpoint is used to create, edit and
@include 'alerts/enterprise-only.mdx'
@include 'alerts/restricted-root.mdx'
@include 'alerts/restricted-admin.mdx'
The `/sys/quotas/lease-count` endpoint is used to create, edit and delete lease count quotas.

Some files were not shown because too many files have changed in this diff.