Mirror of https://github.com/optim-enterprises-bv/vault.git (synced 2025-10-30 10:12:35 +00:00)
Identity: add duplicate reporting to logs (#29325)
* Identity: add duplicate reporting to logs
* Add changelog
* Fix breaking Ent change
* Revert changes to existing ent test helper arguments as they will break on merge
* Update changelog/29325.txt

Co-authored-by: Bianca <48203644+biazmoreira@users.noreply.github.com>
changelog/29325.txt — new file (+4)
@@ -0,0 +1,4 @@
```release-note:improvement
identity: Added reporting in Vault logs during unseal to help identify any
duplicate identity resources in storage.
```
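For context, the new report is emitted as a series of warning log lines during unseal. The excerpt below is taken from the expected output in the new unit test further down (the UUIDs are the test's synthetic IDs), so it shows the shape of the report rather than output from a real cluster:

    DUPLICATES DETECTED, see following logs for details and refer to https://developer.hashicorp.com/vault/docs/upgrading/identity-deduplication for resolution.
    1 different-case local entity alias duplicates found (potential security risk)
    local entity-alias "DIFFERENT-CASE-ALIAS-1" with mount accessor "local-mount" duplicates 1 others: id="00000000-0000-0000-0000-000000000009" canonical_id="11111111-0000-0000-0000-000000000009" force_deduplication="would merge into entity 11111111-0000-0000-0000-000000000010"
    ...
    end of identity duplicate report, refer to https://developer.hashicorp.com/vault/docs/upgrading/identity-deduplication for resolution.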
@@ -6,7 +6,11 @@ package vault
 import (
 	"context"
 	"errors"
+	"fmt"
+	"sort"
+	"strings"

+	"github.com/hashicorp/go-hclog"
 	log "github.com/hashicorp/go-hclog"
 	"github.com/hashicorp/vault/helper/identity"
 )
@@ -83,3 +87,277 @@ func (r *errorResolver) ResolveAliases(ctx context.Context, parent *identity.Ent

 	return errDuplicateIdentityName
 }
+
+// duplicateReportingErrorResolver collects duplicate information and optionally
+// logs a report on all the duplicates. We don't embed an errorResolver here
+// because we _don't_ want its side effect of warning on just some duplicates
+// as we go, as that's confusing when we have a more comprehensive report. The
+// only other behavior it has is to return a constant error which we can just do
+// ourselves.
+type duplicateReportingErrorResolver struct {
+	// seen* track the unique factors for each identity artifact, so
+	// that we can report on any duplication including different-case duplicates
+	// when in case-sensitive mode.
+	//
+	// Since this is only ever called from `load*` methods on IdentityStore during
+	// an unseal we can assume that it's all from a single goroutine and doesn't
+	// need locking.
+	seenEntities     map[string][]*identity.Entity
+	seenGroups       map[string][]*identity.Group
+	seenAliases      map[string][]*identity.Alias
+	seenLocalAliases map[string][]*identity.Alias
+	logger           hclog.Logger
+}
+
+func newDuplicateReportingErrorResolver(logger hclog.Logger) *duplicateReportingErrorResolver {
+	return &duplicateReportingErrorResolver{
+		seenEntities:     make(map[string][]*identity.Entity),
+		seenGroups:       make(map[string][]*identity.Group),
+		seenAliases:      make(map[string][]*identity.Alias),
+		seenLocalAliases: make(map[string][]*identity.Alias),
+		logger:           logger,
+	}
+}
+
+func (r *duplicateReportingErrorResolver) ResolveEntities(ctx context.Context, existing, duplicate *identity.Entity) error {
+	entityKey := fmt.Sprintf("%s/%s", duplicate.NamespaceID, strings.ToLower(duplicate.Name))
+	r.seenEntities[entityKey] = append(r.seenEntities[entityKey], duplicate)
+	return errDuplicateIdentityName
+}
+
+func (r *duplicateReportingErrorResolver) ResolveGroups(ctx context.Context, existing, duplicate *identity.Group) error {
+	groupKey := fmt.Sprintf("%s/%s", duplicate.NamespaceID, strings.ToLower(duplicate.Name))
+	r.seenGroups[groupKey] = append(r.seenGroups[groupKey], duplicate)
+	return errDuplicateIdentityName
+}
+
+func (r *duplicateReportingErrorResolver) ResolveAliases(ctx context.Context, parent *identity.Entity, existing, duplicate *identity.Alias) error {
+	aliasKey := fmt.Sprintf("%s/%s", duplicate.MountAccessor, strings.ToLower(duplicate.Name))
+	if duplicate.Local {
+		r.seenLocalAliases[aliasKey] = append(r.seenLocalAliases[aliasKey], duplicate)
+	} else {
+		r.seenAliases[aliasKey] = append(r.seenAliases[aliasKey], duplicate)
+	}
+	return errDuplicateIdentityName
+}
+
+type identityDuplicateReportEntry struct {
+	artifactType   string
+	scope          string
+	name           string
+	id             string
+	canonicalID    string
+	resolutionHint string
+	index          int // we care about preserving load order in reporting
+	numOthers      int
+}
+
+type identityDuplicateReport struct {
+	entities                []identityDuplicateReportEntry
+	groups                  []identityDuplicateReportEntry
+	aliases                 []identityDuplicateReportEntry
+	localAliases            []identityDuplicateReportEntry
+	numEntityDuplicates     int
+	numGroupDuplicates      int
+	numAliasDuplicates      int
+	numLocalAliasDuplicates int
+}
+
+func (r *identityDuplicateReportEntry) Description() string {
+	scopeField := "namespace ID"
+	if r.artifactType == "entity-alias" || r.artifactType == "local entity-alias" {
+		scopeField = "mount accessor"
+	}
+	return fmt.Sprintf("%s %q with %s %q duplicates %d others",
+		r.artifactType, r.name, scopeField, r.scope, r.numOthers)
+}
+
+// Labels returns metadata pairs suitable for passing to a logger; each slice
+// element corresponds alternately to a key and then a value.
+func (r *identityDuplicateReportEntry) Labels() []interface{} {
+	args := []interface{}{"id", r.id}
+	if r.canonicalID != "" {
+		args = append(args, "canonical_id")
+		args = append(args, r.canonicalID)
+	}
+	if r.resolutionHint != "" {
+		args = append(args, "force_deduplication")
+		args = append(args, r.resolutionHint)
+	}
+	return args
+}
+
+func (r *duplicateReportingErrorResolver) Report() identityDuplicateReport {
+	var report identityDuplicateReport
+
+	for _, entities := range r.seenEntities {
+		if len(entities) <= 1 {
+			// Fast path, skip non-duplicates
+			continue
+		}
+		report.numEntityDuplicates++
+		// We don't care if it's an exact match or not for entities since we'll
+		// rename in either case when we force a de-dupe.
+		for idx, entity := range entities {
+			r := identityDuplicateReportEntry{
+				artifactType: "entity",
+				scope:        entity.NamespaceID,
+				name:         entity.Name,
+				id:           entity.ID,
+				index:        idx,
+				numOthers:    len(entities) - 1,
+			}
+			if idx < len(entities)-1 {
+				r.resolutionHint = fmt.Sprintf("would rename to %s-%s", entity.Name, entity.ID)
+			} else {
+				r.resolutionHint = "would not rename"
+			}
+			report.entities = append(report.entities, r)
+		}
+	}
+	sortReportEntries(report.entities)
+
+	for _, groups := range r.seenGroups {
+		if len(groups) <= 1 {
+			// Fast path, skip non-duplicates
+			continue
+		}
+		report.numGroupDuplicates++
+		// We don't care if it's an exact match or not for groups since we'll
+		// rename in either case when we force a de-dupe.
+		for idx, group := range groups {
+			r := identityDuplicateReportEntry{
+				artifactType: "group",
+				scope:        group.NamespaceID,
+				name:         group.Name,
+				id:           group.ID,
+				index:        idx,
+				numOthers:    len(groups) - 1,
+			}
+			if idx < len(groups)-1 {
+				r.resolutionHint = fmt.Sprintf("would rename to %s-%s", group.Name, group.ID)
+			} else {
+				r.resolutionHint = "would not rename"
+			}
+			report.groups = append(report.groups, r)
+		}
+	}
+	sortReportEntries(report.groups)
+
+	reportAliases(&report, r.seenAliases, false)
+	reportAliases(&report, r.seenLocalAliases, true)
+
+	return report
+}
+
+func reportAliases(report *identityDuplicateReport, seen map[string][]*identity.Alias, local bool) {
+	artType := "entity-alias"
+	if local {
+		artType = "local entity-alias"
+	}
+	for _, aliases := range seen {
+		if len(aliases) <= 1 {
+			// Fast path, skip non-duplicates
+			continue
+		}
+		if local {
+			report.numLocalAliasDuplicates++
+		} else {
+			report.numAliasDuplicates++
+		}
+		// We can't have exact-match duplicates for aliases at this point because
+		// they would have been merged during load. These are different-case
+		// duplicates that must be handled.
+		for idx, alias := range aliases {
+			r := identityDuplicateReportEntry{
+				artifactType: artType,
+				scope:        alias.MountAccessor,
+				name:         alias.Name,
+				id:           alias.ID,
+				canonicalID:  alias.CanonicalID,
+				index:        idx,
+				numOthers:    len(aliases) - 1,
+			}
+			if idx < len(aliases)-1 {
+				r.resolutionHint = fmt.Sprintf("would merge into entity %s", aliases[len(aliases)-1].CanonicalID)
+			} else {
+				r.resolutionHint = "would merge others into this entity"
+			}
+			if local {
+				report.localAliases = append(report.localAliases, r)
+			} else {
+				report.aliases = append(report.aliases, r)
+			}
+		}
+	}
+	sortReportEntries(report.aliases)
+}
+
+func sortReportEntries(es []identityDuplicateReportEntry) {
+	sort.Slice(es, func(i, j int) bool {
+		a, b := es[i], es[j]
+		if a.scope != b.scope {
+			return a.scope < b.scope
+		}
+		aName, bName := strings.ToLower(a.name), strings.ToLower(b.name)
+		if aName != bName {
+			return aName < bName
+		}
+		return a.index < b.index
+	})
+}
+
+// Warner is a subset of hclog.Logger that only has the Warn method to make
+// testing simpler.
+type Warner interface {
+	Warn(msg string, args ...interface{})
+}
+
+// TODO set this correctly.
+const identityDuplicateReportUrl = "https://developer.hashicorp.com/vault/docs/upgrading/identity-deduplication"
+
+func (r *duplicateReportingErrorResolver) LogReport(log Warner) {
+	report := r.Report()
+
+	if report.numEntityDuplicates == 0 && report.numGroupDuplicates == 0 && report.numAliasDuplicates == 0 {
+		return
+	}
+
+	log.Warn("DUPLICATES DETECTED, see following logs for details and refer to " +
+		identityDuplicateReportUrl + " for resolution.")
+
+	// Aliases first since they are most critical to resolve. Local first because
+	// all the rest can be ignored on a perf secondary.
+	if len(report.localAliases) > 0 {
+		log.Warn(fmt.Sprintf("%d different-case local entity alias duplicates found (potential security risk)", report.numLocalAliasDuplicates))
+		for _, e := range report.localAliases {
+			log.Warn(e.Description(), e.Labels()...)
+		}
+		log.Warn("end of different-case local entity-alias duplicates")
+	}
+	if len(report.aliases) > 0 {
+		log.Warn(fmt.Sprintf("%d different-case entity alias duplicates found (potential security risk)", report.numAliasDuplicates))
+		for _, e := range report.aliases {
+			log.Warn(e.Description(), e.Labels()...)
+		}
+		log.Warn("end of different-case entity-alias duplicates")
+	}
+
+	if len(report.entities) > 0 {
+		log.Warn(fmt.Sprintf("%d entity duplicates found", report.numEntityDuplicates))
+		for _, e := range report.entities {
+			log.Warn(e.Description(), e.Labels()...)
+		}
+		log.Warn("end of entity duplicates")
+	}
+
+	if len(report.groups) > 0 {
+		log.Warn(fmt.Sprintf("%d group duplicates found", report.numGroupDuplicates))
+		for _, e := range report.groups {
+			log.Warn(e.Description(), e.Labels()...)
+		}
+		log.Warn("end of group duplicates")
+	}
+	log.Warn("end of identity duplicate report, refer to " +
+		identityDuplicateReportUrl + " for resolution.")
+}
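To make the intended call pattern concrete, here is a minimal, hypothetical sketch (not part of the change) of driving the resolver directly from inside the `vault` package, mirroring what the unseal path and the unit test below do: record candidate duplicates while loading, then emit one consolidated report. The function name `exampleDuplicateReport` and the entity names/IDs are invented for illustration.

```go
// exampleDuplicateReport is a hypothetical helper showing the resolver's
// life cycle: collect duplicates, then log a single consolidated report.
func exampleDuplicateReport(logger hclog.Logger) {
	r := newDuplicateReportingErrorResolver(logger)

	// Record two entities that differ only by case in the same namespace. The
	// returned errDuplicateIdentityName is ignored here, just as it is during
	// case-sensitive loading.
	for i, name := range []string{"my-entity", "MY-ENTITY"} {
		_ = r.ResolveEntities(context.Background(), nil, &identity.Entity{
			ID:          fmt.Sprintf("entity-id-%d", i),
			Name:        name,
			NamespaceID: "root",
		})
	}

	// hclog.Logger satisfies the Warner interface, so it can receive the report.
	r.LogReport(logger)
}
```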
vault/identity_store_conflicts_test.go — new file (+180)
@@ -0,0 +1,180 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1

package vault

import (
	"bytes"
	"context"
	"fmt"
	"strings"
	"testing"

	log "github.com/hashicorp/go-hclog"
	"github.com/hashicorp/vault/helper/identity"
	"github.com/stretchr/testify/require"
)

// TestDuplicateReportingErrorResolver tests that the reporting error resolver
// correctly records, identifies, sorts and outputs information about duplicate
// entities.
func TestDuplicateReportingErrorResolver(t *testing.T) {
	t.Parallel()

	entities := [][]string{
		// Some unduplicated entities in different namespaces
		{"root", "foo"},
		{"root", "bar"},
		{"admin", "foo"},
		{"developers", "BAR"},

		// Some exact-match duplicates in different namespaces
		{"root", "exact-dupe-1"},
		{"root", "exact-dupe-1"},
		{"admin", "exact-dupe-1"},
		{"admin", "exact-dupe-1"},

		// Some different-case duplicates in different namespaces
		{"root", "different-case-dupe-1"},
		{"root", "DIFFERENT-CASE-DUPE-1"},
		{"admin", "different-case-dupe-1"},
		{"admin", "DIFFERENT-case-DUPE-1"},
		{"admin", "different-case-DUPE-1"},
	}

	// Note that the `local-` prefix here is used to define a mount as local as
	// well as being used in its name.
	aliases := [][]string{
		// Unduplicated aliases on different mounts
		{"mount1", "alias1"},
		{"mount2", "alias1"},
		{"mount2", "alias2"},
		{"local-mount", "alias1"},

		// We don't bother testing exact-match aliases since they will have been
		// merged by the time they are reported.

		// Different-case aliases on different mounts (some local)
		{"mount1", "different-case-alias-1"},
		{"mount1", "DIFFERENT-CASE-ALIAS-1"},
		{"mount2", "different-case-alias-1"},
		{"mount2", "DIFFERENT-CASE-ALIAS-1"},
		{"mount2", "different-CASE-ALIAS-1"},
		{"local-mount", "DIFFERENT-CASE-ALIAS-1"},
		{"local-mount", "different-CASE-ALIAS-1"},
	}

	expectReport := `
DUPLICATES DETECTED, see following logs for details and refer to https://developer.hashicorp.com/vault/docs/upgrading/identity-deduplication for resolution.:
1 different-case local entity alias duplicates found (potential security risk):
local entity-alias "DIFFERENT-CASE-ALIAS-1" with mount accessor "local-mount" duplicates 1 others: id="00000000-0000-0000-0000-000000000009" canonical_id="11111111-0000-0000-0000-000000000009" force_deduplication="would merge into entity 11111111-0000-0000-0000-000000000010"
local entity-alias "different-CASE-ALIAS-1" with mount accessor "local-mount" duplicates 1 others: id="00000000-0000-0000-0000-000000000010" canonical_id="11111111-0000-0000-0000-000000000010" force_deduplication="would merge others into this entity"
end of different-case local entity-alias duplicates:
2 different-case entity alias duplicates found (potential security risk):
entity-alias "different-case-alias-1" with mount accessor "mount1" duplicates 1 others: id="00000000-0000-0000-0000-000000000004" canonical_id="11111111-0000-0000-0000-000000000004" force_deduplication="would merge into entity 11111111-0000-0000-0000-000000000005"
entity-alias "DIFFERENT-CASE-ALIAS-1" with mount accessor "mount1" duplicates 1 others: id="00000000-0000-0000-0000-000000000005" canonical_id="11111111-0000-0000-0000-000000000005" force_deduplication="would merge others into this entity"
entity-alias "different-case-alias-1" with mount accessor "mount2" duplicates 2 others: id="00000000-0000-0000-0000-000000000006" canonical_id="11111111-0000-0000-0000-000000000006" force_deduplication="would merge into entity 11111111-0000-0000-0000-000000000008"
entity-alias "DIFFERENT-CASE-ALIAS-1" with mount accessor "mount2" duplicates 2 others: id="00000000-0000-0000-0000-000000000007" canonical_id="11111111-0000-0000-0000-000000000007" force_deduplication="would merge into entity 11111111-0000-0000-0000-000000000008"
entity-alias "different-CASE-ALIAS-1" with mount accessor "mount2" duplicates 2 others: id="00000000-0000-0000-0000-000000000008" canonical_id="11111111-0000-0000-0000-000000000008" force_deduplication="would merge others into this entity"
end of different-case entity-alias duplicates:
4 entity duplicates found:
entity "different-case-dupe-1" with namespace ID "admin" duplicates 2 others: id="00000000-0000-0000-0000-000000000010" force_deduplication="would rename to different-case-dupe-1-00000000-0000-0000-0000-000000000010"
entity "DIFFERENT-case-DUPE-1" with namespace ID "admin" duplicates 2 others: id="00000000-0000-0000-0000-000000000011" force_deduplication="would rename to DIFFERENT-case-DUPE-1-00000000-0000-0000-0000-000000000011"
entity "different-case-DUPE-1" with namespace ID "admin" duplicates 2 others: id="00000000-0000-0000-0000-000000000012" force_deduplication="would not rename"
entity "exact-dupe-1" with namespace ID "admin" duplicates 1 others: id="00000000-0000-0000-0000-000000000006" force_deduplication="would rename to exact-dupe-1-00000000-0000-0000-0000-000000000006"
entity "exact-dupe-1" with namespace ID "admin" duplicates 1 others: id="00000000-0000-0000-0000-000000000007" force_deduplication="would not rename"
entity "different-case-dupe-1" with namespace ID "root" duplicates 1 others: id="00000000-0000-0000-0000-000000000008" force_deduplication="would rename to different-case-dupe-1-00000000-0000-0000-0000-000000000008"
entity "DIFFERENT-CASE-DUPE-1" with namespace ID "root" duplicates 1 others: id="00000000-0000-0000-0000-000000000009" force_deduplication="would not rename"
entity "exact-dupe-1" with namespace ID "root" duplicates 1 others: id="00000000-0000-0000-0000-000000000004" force_deduplication="would rename to exact-dupe-1-00000000-0000-0000-0000-000000000004"
entity "exact-dupe-1" with namespace ID "root" duplicates 1 others: id="00000000-0000-0000-0000-000000000005" force_deduplication="would not rename"
end of entity duplicates:
4 group duplicates found:
group "different-case-dupe-1" with namespace ID "admin" duplicates 2 others: id="00000000-0000-0000-0000-000000000010" force_deduplication="would rename to different-case-dupe-1-00000000-0000-0000-0000-000000000010"
group "DIFFERENT-case-DUPE-1" with namespace ID "admin" duplicates 2 others: id="00000000-0000-0000-0000-000000000011" force_deduplication="would rename to DIFFERENT-case-DUPE-1-00000000-0000-0000-0000-000000000011"
group "different-case-DUPE-1" with namespace ID "admin" duplicates 2 others: id="00000000-0000-0000-0000-000000000012" force_deduplication="would not rename"
group "exact-dupe-1" with namespace ID "admin" duplicates 1 others: id="00000000-0000-0000-0000-000000000006" force_deduplication="would rename to exact-dupe-1-00000000-0000-0000-0000-000000000006"
group "exact-dupe-1" with namespace ID "admin" duplicates 1 others: id="00000000-0000-0000-0000-000000000007" force_deduplication="would not rename"
group "different-case-dupe-1" with namespace ID "root" duplicates 1 others: id="00000000-0000-0000-0000-000000000008" force_deduplication="would rename to different-case-dupe-1-00000000-0000-0000-0000-000000000008"
group "DIFFERENT-CASE-DUPE-1" with namespace ID "root" duplicates 1 others: id="00000000-0000-0000-0000-000000000009" force_deduplication="would not rename"
group "exact-dupe-1" with namespace ID "root" duplicates 1 others: id="00000000-0000-0000-0000-000000000004" force_deduplication="would rename to exact-dupe-1-00000000-0000-0000-0000-000000000004"
group "exact-dupe-1" with namespace ID "root" duplicates 1 others: id="00000000-0000-0000-0000-000000000005" force_deduplication="would not rename"
end of group duplicates:
end of identity duplicate report, refer to https://developer.hashicorp.com/vault/docs/upgrading/identity-deduplication for resolution.:
`

	// Create a new errorResolver
	r := newDuplicateReportingErrorResolver(log.NewNullLogger())

	for i, pair := range entities {
		// Create a fake UUID based on the index; this makes sure sort order is
		// preserved when eyeballing the expected report.
		id := fmt.Sprintf("00000000-0000-0000-0000-%012d", i)
		// Create a new entity with the pair
		entity := &identity.Entity{
			ID:          id,
			Name:        pair[1],
			NamespaceID: pair[0],
		}

		// Call ResolveEntities, assume existing is nil for now. In real life we
		// should be passed the existing entity for the exact match dupes but we
		// don't depend on that so it's fine to omit.
		_ = r.ResolveEntities(context.Background(), nil, entity)
		// Don't care about the actual error here since it would be ignored in
		// case-sensitive mode anyway.

		// Also, since the data model is the same, pretend these are groups too
		group := &identity.Group{
			ID:          id,
			Name:        pair[1],
			NamespaceID: pair[0],
		}
		_ = r.ResolveGroups(context.Background(), nil, group)
	}

	// Load aliases second because that is realistic and yet we want to report on
	// them first.
	for i, pair := range aliases {
		entity := &identity.Entity{
			ID:          fmt.Sprintf("11111111-0000-0000-0000-%012d", i),
			Name:        pair[1] + "-entity",
			NamespaceID: "root",
		}
		alias := &identity.Alias{
			ID:            fmt.Sprintf("00000000-0000-0000-0000-%012d", i),
			CanonicalID:   entity.ID,
			Name:          pair[1],
			MountAccessor: pair[0],
			// Parse our hacky DSL to define some alias mounts as local
			Local: strings.HasPrefix(pair[0], "local-"),
		}
		_ = r.ResolveAliases(context.Background(), entity, nil, alias)
	}

	// "log" the report and check it matches the expected report above.
	var testLog identityTestWarnLogger
	r.LogReport(&testLog)

	// Dump the raw report to make it easier to copy paste/read
	t.Log("\n\n" + testLog.buf.String())

	require.Equal(t,
		strings.TrimSpace(expectReport),
		strings.TrimSpace(testLog.buf.String()),
	)
}

type identityTestWarnLogger struct {
	buf bytes.Buffer
}

func (l *identityTestWarnLogger) Warn(msg string, args ...interface{}) {
	l.buf.WriteString(msg + ":")
	if len(args)%2 != 0 {
		panic("args must be key-value pairs")
	}
	for i := 0; i < len(args); i += 2 {
		l.buf.WriteString(fmt.Sprintf(" %s=%q", args[i], args[i+1]))
	}
	l.buf.WriteString("\n")
}
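The new test is self-contained within the `vault` package; it should be runnable on its own with something like `go test ./vault -run TestDuplicateReportingErrorResolver` from the repository root (command shown for illustration; adjust to your local setup).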
@@ -7,12 +7,16 @@ import (
 	"context"
 	"fmt"
 	"math/rand"
+	"regexp"
+	"slices"
+	"strconv"
 	"strings"
 	"testing"
 	"time"

 	"github.com/armon/go-metrics"
 	"github.com/go-test/deep"
+	"github.com/hashicorp/go-hclog"
 	uuid "github.com/hashicorp/go-uuid"
 	credGithub "github.com/hashicorp/vault/builtin/credential/github"
 	"github.com/hashicorp/vault/builtin/credential/userpass"
@@ -1457,10 +1461,10 @@ func TestEntityStoreLoadingIsDeterministic(t *testing.T) {
	// probability that is unrealistic but ensures we have duplicates on every
	// test run with high probability and more than 1 duplicate often.
	for i := 0; i <= 100; i++ {
-		id := fmt.Sprintf("entity-%d", i)
+		name := fmt.Sprintf("entity-%d", i)
		alias := fmt.Sprintf("alias-%d", i)
		localAlias := fmt.Sprintf("localalias-%d", i)
-		e := makeEntityForPacker(t, id, c.identityStore.entityPacker)
+		e := makeEntityForPacker(t, name, c.identityStore.entityPacker)
		attachAlias(t, e, alias, upme)
		attachAlias(t, e, localAlias, localMe)
		err = TestHelperWriteToStoragePacker(ctx, c.identityStore.entityPacker, e.ID, e)
@@ -1493,6 +1497,14 @@ func TestEntityStoreLoadingIsDeterministic(t *testing.T) {
			rnd = rand.Float64()
			dupeNum++
		}
+		// See if we should add entity _name_ duplicates too (with no aliases)
+		rnd = rand.Float64()
+		for rnd < pDup {
+			e := makeEntityForPacker(t, name, c.identityStore.entityPacker)
+			err = TestHelperWriteToStoragePacker(ctx, c.identityStore.entityPacker, e.ID, e)
+			require.NoError(t, err)
+			rnd = rand.Float64()
+		}
		// One more edge case is that it's currently possible as of the time of
		// writing for a failure during entity invalidation to result in a permanent
		// "cached" entity in the local alias packer even though we do have the
@@ -1511,23 +1523,27 @@ func TestEntityStoreLoadingIsDeterministic(t *testing.T) {

	// Create some groups
	for i := 0; i <= 100; i++ {
-		id := fmt.Sprintf("group-%d", i)
-		bucketKey := c.identityStore.groupPacker.BucketKey(id)
+		name := fmt.Sprintf("group-%d", i)
		// Add an alias to every other group
		alias := ""
		if i%2 == 0 {
			alias = fmt.Sprintf("groupalias-%d", i)
		}
-		e := makeGroupWithIDAndAlias(t, id, alias, bucketKey, upme)
+		e := makeGroupWithNameAndAlias(t, name, alias, c.identityStore.groupPacker, upme)
		err = TestHelperWriteToStoragePacker(ctx, c.identityStore.groupPacker, e.ID, e)
		require.NoError(t, err)
	}
	// Now add 10 groups with the same alias to ensure duplicates don't cause
	// non-deterministic behavior.
	for i := 0; i <= 10; i++ {
-		id := fmt.Sprintf("group-dup-%d", i)
-		bucketKey := c.identityStore.groupPacker.BucketKey(id)
-		e := makeGroupWithIDAndAlias(t, id, "groupalias-dup", bucketKey, upme)
+		name := fmt.Sprintf("group-dup-%d", i)
+		e := makeGroupWithNameAndAlias(t, name, "groupalias-dup", c.identityStore.groupPacker, upme)
+		err = TestHelperWriteToStoragePacker(ctx, c.identityStore.groupPacker, e.ID, e)
+		require.NoError(t, err)
+	}
+	// Add a second and third groups with duplicate names too.
+	for _, name := range []string{"group-0", "group-1", "group-1"} {
+		e := makeGroupWithNameAndAlias(t, name, "", c.identityStore.groupPacker, upme)
		err = TestHelperWriteToStoragePacker(ctx, c.identityStore.groupPacker, e.ID, e)
		require.NoError(t, err)
	}
@@ -1539,14 +1555,14 @@ func TestEntityStoreLoadingIsDeterministic(t *testing.T) {
	// To test that this is deterministic we need to load from storage a bunch of
	// times and make sure we get the same result. For easier debugging we'll
	// build a list of human readable ids that we can compare.
-	lastIDs := []string{}
+	prevLoadedNames := []string{}
	for i := 0; i < 10; i++ {
		// Seal and unseal to reload the identity store
		require.NoError(t, c.Seal(rootToken))
		require.True(t, c.Sealed())
		for _, key := range sealKeys {
			unsealed, err := c.Unseal(key)
-			require.NoError(t, err)
+			require.NoError(t, err, "failed unseal on attempt %d", i)
			if unsealed {
				break
			}
@@ -1554,7 +1570,7 @@ func TestEntityStoreLoadingIsDeterministic(t *testing.T) {
		require.False(t, c.Sealed())

		// Identity store should be loaded now. Check it's contents.
-		loadedIDs := []string{}
+		loadedNames := []string{}

		tx := c.identityStore.db.Txn(false)

@@ -1562,73 +1578,143 @@ func TestEntityStoreLoadingIsDeterministic(t *testing.T) {
		iter, err := tx.LowerBound(entitiesTable, "id", "")
		require.NoError(t, err)
		for item := iter.Next(); item != nil; item = iter.Next() {
-			// We already added "type" prefixes to the IDs when creating them so just
-			// append here.
			e := item.(*identity.Entity)
-			loadedIDs = append(loadedIDs, e.ID)
+			loadedNames = append(loadedNames, e.Name)
			for _, a := range e.Aliases {
-				loadedIDs = append(loadedIDs, a.ID)
+				loadedNames = append(loadedNames, a.Name)
			}
		}
		// This is a non-triviality check to make sure we actually loaded stuff and
		// are not just passing because of a bug in the test.
-		numLoaded := len(loadedIDs)
+		numLoaded := len(loadedNames)
		require.Greater(t, numLoaded, 300, "not enough entities and aliases loaded on attempt %d", i)

+		// Standalone alias query
+		iter, err = tx.LowerBound(entityAliasesTable, "id", "")
+		require.NoError(t, err)
+		for item := iter.Next(); item != nil; item = iter.Next() {
+			a := item.(*identity.Alias)
+			loadedNames = append(loadedNames, a.Name)
+		}
+
		// Groups
		iter, err = tx.LowerBound(groupsTable, "id", "")
		require.NoError(t, err)
		for item := iter.Next(); item != nil; item = iter.Next() {
			g := item.(*identity.Group)
-			loadedIDs = append(loadedIDs, g.ID)
+			loadedNames = append(loadedNames, g.Name)
			if g.Alias != nil {
-				loadedIDs = append(loadedIDs, g.Alias.ID)
+				loadedNames = append(loadedNames, g.Alias.Name)
			}
		}
		// This is a non-triviality check to make sure we actually loaded stuff and
		// are not just passing because of a bug in the test.
-		groupsLoaded := len(loadedIDs) - numLoaded
+		groupsLoaded := len(loadedNames) - numLoaded
		require.Greater(t, groupsLoaded, 140, "not enough groups and aliases loaded on attempt %d", i)

-		entIdentityStoreDeterminismAssert(t, i, loadedIDs, lastIDs)
+		// note `lastIDs` argument is not needed any more but we can't change the
+		// signature without breaking enterprise. It's simpler to keep it unused for
+		// now until both parts of this merge.
+		entIdentityStoreDeterminismAssert(t, i, loadedNames, nil)

		if i > 0 {
			// Should be in the same order if we are deterministic since MemDB has strong ordering.
-			require.Equal(t, lastIDs, loadedIDs, "different result on attempt %d", i)
+			require.Equal(t, prevLoadedNames, loadedNames, "different result on attempt %d", i)
		}
-		lastIDs = loadedIDs
+		prevLoadedNames = loadedNames
	}
}

-func makeGroupWithIDAndAlias(t *testing.T, id, alias, bucketKey string, me *MountEntry) *identity.Group {
-	g := &identity.Group{
-		ID:          id,
-		Name:        id,
-		NamespaceID: namespace.RootNamespaceID,
-		BucketKey:   bucketKey,
-	}
-	if alias != "" {
-		g.Alias = &identity.Alias{
-			ID:            id,
-			Name:          alias,
-			CanonicalID:   id,
-			MountType:     me.Type,
-			MountAccessor: me.Accessor,
-		}
-	}
-	return g
-}
-
-func makeLocalAliasWithID(t *testing.T, id, entityID string, bucketKey string, me *MountEntry) *identity.LocalAliases {
-	return &identity.LocalAliases{
-		Aliases: []*identity.Alias{
-			{
-				ID:            id,
-				Name:          id,
-				CanonicalID:   entityID,
-				MountType:     me.Type,
-				MountAccessor: me.Accessor,
-			},
-		},
-	}
-}
+// TestEntityStoreLoadingDuplicateReporting tests the reporting of different
+// types of duplicates during unseal when in case-sensitive mode.
+func TestEntityStoreLoadingDuplicateReporting(t *testing.T) {
+	logger := corehelpers.NewTestLogger(t)
+	ims, err := inmem.NewTransactionalInmemHA(nil, logger)
+	require.NoError(t, err)
+
+	cfg := &CoreConfig{
+		Physical:        ims,
+		HAPhysical:      ims.(physical.HABackend),
+		Logger:          logger,
+		BuiltinRegistry: corehelpers.NewMockBuiltinRegistry(),
+		CredentialBackends: map[string]logical.Factory{
+			"userpass": userpass.Factory,
+		},
+	}
+
+	c, sealKeys, rootToken := TestCoreUnsealedWithConfig(t, cfg)
+
+	// Inject values into storage
+	upme, err := TestUserpassMount(c, false)
+	require.NoError(t, err)
+	localMe, err := TestUserpassMount(c, true)
+	require.NoError(t, err)
+
+	ctx := namespace.RootContext(nil)
+
+	identityCreateCaseDuplicates(t, ctx, c, upme, localMe)
+
+	entIdentityStoreDuplicateReportTestSetup(t, ctx, c, rootToken)
+
+	// Storage is now primed for the test.
+
+	// Seal and unseal to reload the identity store
+	require.NoError(t, c.Seal(rootToken))
+	require.True(t, c.Sealed())
+
+	// Setup a logger we can use to capture unseal logs
+	var unsealLogs []string
+	unsealLogger := &logFn{
+		fn: func(msg string, args []interface{}) {
+			pairs := make([]string, 0, len(args)/2)
+			for pair := range slices.Chunk(args, 2) {
+				// Yes this will panic if we didn't log an even number of args but that's
+				// OK because that's a bug!
+				pairs = append(pairs, fmt.Sprintf("%s=%s", pair[0], pair[1]))
+			}
+			unsealLogs = append(unsealLogs, fmt.Sprintf("%s: %s", msg, strings.Join(pairs, " ")))
+		},
+	}
+	logger.RegisterSink(unsealLogger)
+
+	for _, key := range sealKeys {
+		unsealed, err := c.Unseal(key)
+		require.NoError(t, err)
+		if unsealed {
+			break
+		}
+	}
+	require.False(t, c.Sealed())
+	logger.DeregisterSink(unsealLogger)
+
+	// Identity store should be loaded now. Check its contents.
+
+	// We don't expect any actual behavior change, just logs reporting duplicates.
+	// We could assert the current "expected" behavior but it's actually broken in
+	// many of these cases and seems strange to encode in a test that we want
+	// broken behavior!
+	numDupes := make(map[string]int)
+	duplicateCountRe := regexp.MustCompile(`(\d+) (different-case( local)? entity alias|entity|group) duplicates found`)
+	for _, log := range unsealLogs {
+		if matches := duplicateCountRe.FindStringSubmatch(log); len(matches) >= 3 {
+			num, _ := strconv.Atoi(matches[1])
+			numDupes[matches[2]] = num
+		}
+	}
+	t.Logf("numDupes: %v", numDupes)
+	wantAliases, wantLocalAliases, wantEntities, wantGroups := identityStoreDuplicateReportTestWantDuplicateCounts()
+	require.Equal(t, wantLocalAliases, numDupes["different-case local entity alias"])
+	require.Equal(t, wantAliases, numDupes["different-case entity alias"])
+	require.Equal(t, wantEntities, numDupes["entity"])
+	require.Equal(t, wantGroups, numDupes["group"])
+}
+
+type logFn struct {
+	fn func(msg string, args []interface{})
+}
+
+// Accept implements hclog.SinkAdapter
+func (f *logFn) Accept(name string, level hclog.Level, msg string, args ...interface{}) {
+	f.fn(msg, args)
+}
@@ -12,10 +12,25 @@ import (

//go:generate go run github.com/hashicorp/vault/tools/stubmaker

-func entIdentityStoreDeterminismTestSetup(t *testing.T, ctx context.Context, c *Core, upme, localme *MountEntry) {
+func entIdentityStoreDeterminismTestSetup(t *testing.T, ctx context.Context, c *Core, me, localme *MountEntry) {
	// no op
}

func entIdentityStoreDeterminismAssert(t *testing.T, i int, loadedIDs, lastIDs []string) {
	// no op
}
+
+func entIdentityStoreDuplicateReportTestSetup(t *testing.T, ctx context.Context, c *Core, rootToken string) {
+	// no op
+}
+
+func identityStoreDuplicateReportTestWantDuplicateCounts() (int, int, int, int) {
+	// Note that the second count is for local aliases. CE Vault doesn't really
+	// distinguish between local and non-local aliases because it doesn't have any
+	// support for Performance Replication. But it's possible in code at least to
+	// set the local flag on a mount or alias during creation so we might as well
+	// test it behaves as expected in the CE code. It's maybe just about possible
+	// that this could happen in real life too because of a downgrade from
+	// Enterprise.
+	return 1, 1, 1, 1
+}
@@ -23,6 +23,7 @@ import (
	"github.com/hashicorp/vault/helper/storagepacker"
	"github.com/hashicorp/vault/sdk/helper/consts"
	"github.com/hashicorp/vault/sdk/logical"
+	"github.com/stretchr/testify/require"
	"google.golang.org/protobuf/types/known/anypb"
	"google.golang.org/protobuf/types/known/timestamppb"
)
@@ -83,9 +84,19 @@ func (c *Core) loadIdentityStoreArtifacts(ctx context.Context) error {
		return err
	}
+
+	// Also reset the conflict resolver so that we report potential duplicates to
+	// be resolved before it's safe to return to case-insensitive mode.
+	reporterResolver := newDuplicateReportingErrorResolver(c.identityStore.logger)
+	c.identityStore.conflictResolver = reporterResolver
+
	// Attempt to load identity artifacts once more after memdb is reset to
	// accept case sensitive names
-	return loadFunc(ctx)
+	err = loadFunc(ctx)
+
+	// Log reported duplicates if any found whether or not we end up erroring.
+	reporterResolver.LogReport(c.identityStore.logger)
+
+	return err
}

func (i *IdentityStore) sanitizeName(name string) string {
@@ -623,8 +634,8 @@ func (i *IdentityStore) upsertEntityInTxn(ctx context.Context, txn *memdb.Txn, e
	default:
		// Though this is technically a conflict that should be resolved by the
		// ConflictResolver implementation, the behavior here is a bit nuanced.
-		// Rather than introduce a behavior change, handle this case directly as
-		// before by merging.
+		// Rather than introduce a behavior change, we handle this case directly
+		// as before by merging.
		i.logger.Warn("alias is already tied to a different entity; these entities are being merged",
			"alias_id", alias.ID,
			"other_entity_id", aliasByFactors.CanonicalID,
@@ -644,10 +655,34 @@ func (i *IdentityStore) upsertEntityInTxn(ctx context.Context, txn *memdb.Txn, e
		return nil
	}

-	if strutil.StrListContains(aliasFactors, i.sanitizeName(alias.Name)+alias.MountAccessor) {
-		if err := i.conflictResolver.ResolveAliases(ctx, entity, aliasByFactors, alias); err != nil && !i.disableLowerCasedNames {
-			return err
-		}
+	// This is subtle. We want to call `ResolveAliases` so that the resolver can
+	// get full insight into all the aliases loaded and generate useful reports
+	// about duplicates. However, we don't want to actually change the error
+	// handling behavior from before which would only return an error in a very
+	// specific case (when the alias being added is a duplicate of one for the
+	// same entity and we are not in case-sensitive mode). So we call the method
+	// here unconditionally, but then only handle the resultant error in the
+	// specific case we care about. Note that we choose not to call it `err` to
+	// avoid it being left non-nil in some cases and tripping up later error
+	// handling code, and to signal something different is happening here. Note
+	// that we explicitly _want_ this to be here and not before we merge
+	// duplicates above, because duplicates that have always merged are not a
+	// problem to the user and are already logged. We care about different-case
+	// duplicates that are not being considered duplicates right now because we
+	// are in case-sensitive mode so we can report these to the operator ahead
+	// of them disabling case-sensitive mode.
+	conflictErr := i.conflictResolver.ResolveAliases(ctx, entity, aliasByFactors, alias)
+
+	// This appears to be accounting for any duplicate aliases for the same
+	// Entity. In that case we would have skipped over the merge above in the
+	// `aliasByFactors.CanonicalID == entity.ID` case and made it here. Now we
+	// are here, duplicates are reported and may cause an insert error but only
+	// if we are in default case-insensitive mode. Once we are in case-sensitive
+	// mode we'll happily ignore duplicates of any case! This doesn't seem
+	// especially desirable to me, but we'd rather not change behavior for now.
+	if strutil.StrListContains(aliasFactors, i.sanitizeName(alias.Name)+alias.MountAccessor) &&
+		conflictErr != nil && !i.disableLowerCasedNames {
+		return conflictErr
	}

	// Insert or update alias in MemDB using the transaction created above
@@ -2636,24 +2671,113 @@ func (i *IdentityStore) countEntitiesByMountAccessor(ctx context.Context) (map[s
	return byMountAccessor, nil
}

-func makeEntityForPacker(_t *testing.T, id string, p *storagepacker.StoragePacker) *identity.Entity {
+func makeEntityForPacker(t *testing.T, name string, p *storagepacker.StoragePacker) *identity.Entity {
+	t.Helper()
+	return makeEntityForPackerWithNamespace(t, namespace.RootNamespaceID, name, p)
+}
+
+func makeEntityForPackerWithNamespace(t *testing.T, namespaceID, name string, p *storagepacker.StoragePacker) *identity.Entity {
+	t.Helper()
+	id, err := uuid.GenerateUUID()
+	require.NoError(t, err)
	return &identity.Entity{
		ID:          id,
-		Name:        id,
-		NamespaceID: namespace.RootNamespaceID,
+		Name:        name,
+		NamespaceID: namespaceID,
		BucketKey:   p.BucketKey(id),
	}
}

func attachAlias(t *testing.T, e *identity.Entity, name string, me *MountEntry) *identity.Alias {
	t.Helper()
+	id, err := uuid.GenerateUUID()
+	require.NoError(t, err)
+	if e.NamespaceID != me.NamespaceID {
+		panic("mount and entity in different namespaces")
+	}
	a := &identity.Alias{
-		ID:            name,
+		ID:            id,
		Name:          name,
+		NamespaceID:   me.NamespaceID,
		CanonicalID:   e.ID,
		MountType:     me.Type,
		MountAccessor: me.Accessor,
+		Local:         me.Local,
	}
	e.UpsertAlias(a)
	return a
}
+
+func identityCreateCaseDuplicates(t *testing.T, ctx context.Context, c *Core, upme, localme *MountEntry) {
+	t.Helper()
+
+	if upme.NamespaceID != localme.NamespaceID {
+		panic("both replicated and local auth mounts must be in the same namespace")
+	}
+
+	// Create entities with both case-sensitive and case-insensitive duplicate
+	// suffixes.
+	for i, suffix := range []string{"-case", "-case", "-cAsE"} {
+		// Entity duplicated by name
+		e := makeEntityForPackerWithNamespace(t, upme.NamespaceID, "entity"+suffix, c.identityStore.entityPacker)
+		err := TestHelperWriteToStoragePacker(ctx, c.identityStore.entityPacker, e.ID, e)
+		require.NoError(t, err)
+
+		// Entity that isn't a dupe itself but has duplicated aliases
+		e2 := makeEntityForPackerWithNamespace(t, upme.NamespaceID, fmt.Sprintf("entity-%d", i), c.identityStore.entityPacker)
+		// Add local and non-local aliases for this entity (which will also be
+		// duplicated)
+		attachAlias(t, e2, "alias"+suffix, upme)
+		attachAlias(t, e2, "local-alias"+suffix, localme)
+		err = TestHelperWriteToStoragePacker(ctx, c.identityStore.entityPacker, e2.ID, e2)
+		require.NoError(t, err)
+
+		// Group duplicated by name
+		g := makeGroupWithNameAndAlias(t, "group"+suffix, "", c.identityStore.groupPacker, upme)
+		err = TestHelperWriteToStoragePacker(ctx, c.identityStore.groupPacker, g.ID, g)
+		require.NoError(t, err)
+	}
+}
+
+func makeGroupWithNameAndAlias(t *testing.T, name, alias string, p *storagepacker.StoragePacker, me *MountEntry) *identity.Group {
+	t.Helper()
+	id, err := uuid.GenerateUUID()
+	require.NoError(t, err)
+	id2, err := uuid.GenerateUUID()
+	require.NoError(t, err)
+	g := &identity.Group{
+		ID:          id,
+		Name:        name,
+		NamespaceID: me.NamespaceID,
+		BucketKey:   p.BucketKey(id),
+	}
+	if alias != "" {
+		g.Alias = &identity.Alias{
+			ID:            id2,
+			Name:          alias,
+			CanonicalID:   id,
+			MountType:     me.Type,
+			MountAccessor: me.Accessor,
+			NamespaceID:   me.NamespaceID,
+		}
+	}
+	return g
+}
+
+func makeLocalAliasWithName(t *testing.T, name, entityID string, bucketKey string, me *MountEntry) *identity.LocalAliases {
+	t.Helper()
+	id, err := uuid.GenerateUUID()
+	require.NoError(t, err)
+	return &identity.LocalAliases{
+		Aliases: []*identity.Alias{
+			{
+				ID:            id,
+				Name:          name,
+				CanonicalID:   entityID,
+				MountType:     me.Type,
+				MountAccessor: me.Accessor,
+				NamespaceID:   me.NamespaceID,
+			},
+		},
+	}
+}
@@ -2225,6 +2225,10 @@
)

func TestUserpassMount(c *Core, local bool) (*MountEntry, error) {
+	return TestUserpassMountContext(namespace.RootContext(nil), c, local)
+}
+
+func TestUserpassMountContext(ctx context.Context, c *Core, local bool) (*MountEntry, error) {
	name := "userpass"
	if local {
		name += "-local"
@@ -2234,10 +2238,12 @@ func TestUserpassMount(c *Core, local bool) (*MountEntry, error) {
		Path:        name + "/",
		Type:        "userpass",
		Description: name,
-		Accessor:    name,
+		// Don't specify an accessor so we use a random one otherwise we will cause
+		// horrible issues when we try to create a new mount on a different
+		// namespace but they have the same accessor!
		Local: local,
	}
-	if err := c.enableCredential(namespace.RootContext(nil), userpassMe); err != nil {
+	if err := c.enableCredential(ctx, userpassMe); err != nil {
		return nil, err
	}
	return userpassMe, nil