vault/tools/pipeline/internal/pkg/generate/enos_dynamic_config_test.go
Ryan Cragun cda9ad3491 VAULT-33074: add github sub-command to pipeline (#29403)
* VAULT-33074: add `github` sub-command to `pipeline`

Investigating test workflow failures is a common task that engineers on the
sustaining rotation perform. It often requires quite a bit of manual labor:
inspecting every failed or cancelled workflow in the GitHub UI on a
per-repo/branch/workflow basis and performing root cause analysis.

As we work to improve our pipeline discoverability, this PR adds a new `github`
sub-command to the `pipeline` utility that allows querying for such workflows
and returning either machine-readable or human-readable summaries in a single
place. Eventually we plan to automatically send a summary of this data to
an OTEL collector, but for now sustaining engineers can use it to query for
workflows with a wide variety of criteria.

A common pattern for investigating build/enos test failure workflows would be:
```shell
export GITHUB_TOKEN="YOUR_TOKEN"
go run -race ./tools/pipeline/... github list-workflow-runs -o hashicorp -r vault -d '2025-01-13..2025-01-23' --branch main --status failure build
```

This lists `build` workflow runs in the `hashicorp/vault` repository on the
`main` branch with a `status` or `conclusion` of `failure` within the date
range `2025-01-13..2025-01-23`.

A sustaining engineer will likely do this for both the `vault` and
`vault-enterprise` repositories, and for the `enos-release-testing-oss` and
`enos-release-testing-ent` workflows in addition to `build`, in order to
get a full picture of the last week's failures (see the sketch below).
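
As a rough illustration, the same invocation can be repeated across repositories
and workflows with a small shell loop. This is only a sketch using the flags shown
above; the repo/workflow pairings are assumptions, and querying `vault-enterprise`
requires a token with access to that repository.
```shell
# GITHUB_TOKEN must already be exported, as in the example above.
# Sketch only: the repo/workflow pairings below are assumptions; adjust as needed.
for pair in "vault build" "vault enos-release-testing-oss" \
  "vault-enterprise build" "vault-enterprise enos-release-testing-ent"; do
  set -- $pair
  go run -race ./tools/pipeline/... github list-workflow-runs \
    -o hashicorp -r "$1" -d '2025-01-13..2025-01-23' \
    --branch main --status failure "$2"
done
```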

You can also use this utility to summarize workflows by other statuses,
branches, HEAD SHAs, event triggers, GitHub actors, and so on. For
a full list of filter arguments, pass `-h` to the sub-command.
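
For instance, here is a sketch of the same query narrowed to cancelled rather
than failed runs (assuming `cancelled` is an accepted value for `--status`;
check `-h` for the authoritative list of flags and values):
```shell
# Hypothetical variation of the earlier example: same filters, cancelled runs.
go run -race ./tools/pipeline/... github list-workflow-runs \
  -o hashicorp -r vault -d '2025-01-13..2025-01-23' \
  --branch main --status cancelled build
```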

> [!CAUTION]
> Be careful not to run this without setting strict filter arguments.
> Failing to do so could mean trying to summarize far too many workflows,
> which can result in your API token being disabled for an hour.

Signed-off-by: Ryan Cragun <me@ryan.ec>
2025-01-31 13:48:38 -07:00


// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1

package generate

import (
	"context"
	"os"
	"path/filepath"
	"slices"
	"testing"

	"github.com/stretchr/testify/require"

	"github.com/hashicorp/vault/tools/pipeline/internal/pkg/releases"
)
// testAPIVersions mimics the enterprise artifact versions returned by the
// releases API. It is fed to releases.NewMockClient in the tests below.
var testAPIVersions = []string{
	"1.16.10+ent.hsm.fips1402",
	"1.16.10+ent.fips1402",
	"1.16.10+ent.hsm",
	"1.16.10+ent",
	"1.17.6+ent.hsm.fips1402",
	"1.17.6+ent.fips1402",
	"1.17.6+ent.hsm",
	"1.17.6+ent",
	"1.18.0-rc1+ent.hsm.fips1402",
	"1.18.0-rc1+ent.fips1402",
	"1.18.0-rc1+ent.hsm",
	"1.18.0-rc1+ent",
	"1.17.5+ent.hsm.fips1402",
	"1.17.5+ent.fips1402",
	"1.17.5+ent.hsm",
	"1.17.5+ent",
	"1.16.9+ent.hsm.fips1402",
	"1.16.9+ent.fips1402",
	"1.16.9+ent.hsm",
	"1.16.9+ent",
	"1.17.4+ent.hsm.fips1402",
	"1.17.4+ent.fips1402",
	"1.17.4+ent.hsm",
	"1.17.4+ent",
	"1.16.8+ent.hsm.fips1402",
	"1.16.8+ent.fips1402",
	"1.16.8+ent.hsm",
	"1.16.8+ent",
	"1.17.3+ent.hsm.fips1402",
	"1.17.3+ent.fips1402",
	"1.17.3+ent.hsm",
	"1.16.7+ent.hsm.fips1402",
	"1.17.3+ent",
	"1.16.7+ent.fips1402",
	"1.16.7+ent.hsm",
	"1.16.7+ent",
	"1.17.2+ent.hsm.fips1402",
	"1.17.2+ent.fips1402",
	"1.17.2+ent.hsm",
	"1.17.2+ent",
	"1.16.6+ent.hsm.fips1402",
	"1.16.6+ent.fips1402",
	"1.16.6+ent.hsm",
	"1.16.6+ent",
}
// testAllVersions is the set of base versions represented in testAPIVersions,
// used to build expected results.
var testAllVersions = []string{
	"1.16.6",
	"1.16.7",
	"1.16.8",
	"1.16.9",
	"1.16.10",
	"1.17.2",
	"1.17.3",
	"1.17.4",
	"1.17.5",
	"1.17.6",
	"1.18.0-rc1",
}
// Test_EnosDynamicConfigReq_Validate verifies that request validation accepts
// all supported Vault editions and rejects invalid or incomplete requests.
func Test_EnosDynamicConfigReq_Validate(t *testing.T) {
	t.Parallel()

	for name, test := range map[string]struct {
		in   *EnosDynamicConfigReq
		fail bool
	}{
		"ce edition": {
			in: &EnosDynamicConfigReq{
				VaultEdition:  "ce",
				VaultVersion:  "1.18.0",
				EnosDir:       t.TempDir(),
				FileName:      "test.hcl",
				VersionLister: releases.NewMockClient(testAPIVersions),
			},
		},
		"oss edition": {
			in: &EnosDynamicConfigReq{
				VaultEdition:  "oss",
				VaultVersion:  "1.18.0",
				EnosDir:       t.TempDir(),
				FileName:      "test.hcl",
				VersionLister: releases.NewMockClient(testAPIVersions),
			},
		},
		"ent edition": {
			in: &EnosDynamicConfigReq{
				VaultEdition:  "ent",
				VaultVersion:  "1.18.0",
				EnosDir:       t.TempDir(),
				FileName:      "test.hcl",
				VersionLister: releases.NewMockClient(testAPIVersions),
			},
		},
		"enterprise edition": {
			in: &EnosDynamicConfigReq{
				VaultEdition:  "enterprise",
				VaultVersion:  "1.18.0",
				EnosDir:       t.TempDir(),
				FileName:      "test.hcl",
				VersionLister: releases.NewMockClient(testAPIVersions),
			},
		},
		"ent.hsm edition": {
			in: &EnosDynamicConfigReq{
				VaultEdition:  "ent.hsm",
				VaultVersion:  "1.18.0",
				EnosDir:       t.TempDir(),
				FileName:      "test.hcl",
				VersionLister: releases.NewMockClient(testAPIVersions),
			},
		},
		"ent.fips1402 edition": {
			in: &EnosDynamicConfigReq{
				VaultEdition:  "ent.fips1402",
				VaultVersion:  "1.18.0",
				EnosDir:       t.TempDir(),
				FileName:      "test.hcl",
				VersionLister: releases.NewMockClient(testAPIVersions),
			},
		},
		"ent.hsm.fips1402 edition": {
			in: &EnosDynamicConfigReq{
				VaultEdition:  "ent.hsm.fips1402",
				VaultVersion:  "1.18.0",
				EnosDir:       t.TempDir(),
				FileName:      "test.hcl",
				VersionLister: releases.NewMockClient(testAPIVersions),
			},
		},
		"unknown edition": {
			in: &EnosDynamicConfigReq{
				VaultEdition: "ent.nope",
				VaultVersion: "1.18.0",
				EnosDir:      t.TempDir(),
				FileName:     "test.hcl",
			},
			fail: true,
		},
		"invalid version": {
			in: &EnosDynamicConfigReq{
				VaultEdition: "ent.hsm.fips1402",
				VaultVersion: "vault-1.18.0",
				EnosDir:      t.TempDir(),
				FileName:     "test.hcl",
			},
			fail: true,
		},
		"target dir doesn't exist": {
			in: &EnosDynamicConfigReq{
				VaultEdition: "ent.hsm.fips1402",
				VaultVersion: "1.18.0",
			},
			fail: true,
		},
		"no file name": {
			in: &EnosDynamicConfigReq{
				VaultEdition: "ent.hsm.fips1402",
				VaultVersion: "1.18.0",
				EnosDir:      t.TempDir(),
			},
			fail: true,
		},
		"no version lister": {
			in: &EnosDynamicConfigReq{
				VaultEdition: "ent.hsm.fips1402",
				VaultVersion: "1.18.0",
				EnosDir:      t.TempDir(),
				FileName:     "test.hcl",
			},
			fail: true,
		},
	} {
		t.Run(name, func(t *testing.T) {
			t.Parallel()

			err := test.in.Validate(context.Background())
			if test.fail {
				require.Error(t, err)
			} else {
				require.NoError(t, err)
			}
		})
	}
}
// Test_EnosDynamicConfigReq_Run verifies that running the request generates
// the expected dynamic Enos configuration and writes it to the target file.
func Test_EnosDynamicConfigReq_Run(t *testing.T) {
	t.Parallel()

	for desc, test := range map[string]struct {
		req  *EnosDynamicConfigReq
		res  func() *EnosDynamicConfigRes
		hcl  []byte
		fail bool
	}{
		"default config": {
			req: &EnosDynamicConfigReq{
				FileName:      "test.hcl",
				VaultEdition:  "ent.hsm.fips1402",
				VaultVersion:  "1.18.0",
				Skip:          []string{"1.17.2", "1.17.5"},
				NMinus:        2,
				EnosDir:       t.TempDir(),
				VersionLister: releases.NewMockClient(testAPIVersions),
			},
			res: func() *EnosDynamicConfigRes {
				// Clone so that deleting the skipped versions doesn't mutate
				// the shared testAllVersions slice.
				versions := slices.Clone(testAllVersions)
				versions = slices.DeleteFunc(versions, func(v string) bool {
					return v == "1.17.2" || v == "1.17.5"
				})

				return &EnosDynamicConfigRes{
					Globals: &Globals{
						SampleAttributes: &SampleAttrs{
							AWSRegion:             []string{"us-east-1", "us-west-2"},
							DistroVersionAmzn:     []string{"2023"},
							DistroVersionLeap:     []string{"15.6"},
							DistroVersionRhel:     []string{"8.10", "9.4"},
							DistroVersionSles:     []string{"15.6"},
							DistroVersionUbuntu:   []string{"20.04", "24.04"},
							UpgradeInitialVersion: versions,
						},
					},
				}
			},
			hcl: []byte(`# Copyright (c) HashiCorp, Inc.
# SPDX-License-Identifier: BUSL-1.1
# Code generated by pipeline generate enos-dynamic-config DO NOT EDIT.
# This file is overwritten in CI as it contains branch specific and sometimes ever-changing values.
# It's checked in here so that enos samples and scenarios can be performed, just be aware that this
# might change out from under you.
globals {
sample_attributes = {
aws_region = ["us-east-1", "us-west-2"]
distro_version_amzn = ["2023"]
distro_version_leap = ["15.6"]
distro_version_rhel = ["8.10", "9.4"]
distro_version_sles = ["15.6"]
distro_version_ubuntu = ["20.04", "24.04"]
upgrade_initial_version = ["1.16.6", "1.16.7", "1.16.8", "1.16.9", "1.16.10", "1.17.3", "1.17.4", "1.17.6", "1.18.0-rc1"]
}
}
`),
		},
	} {
		t.Run(desc, func(t *testing.T) {
			t.Parallel()

			res, err := test.req.Run(context.Background())
			if test.fail {
				require.Error(t, err)
				return
			}

			require.NoError(t, err)
			require.EqualValues(t, test.res(), res)

			b, err := os.ReadFile(filepath.Join(test.req.EnosDir, test.req.FileName))
			require.NoError(t, err)
			require.EqualValuesf(t, test.hcl, b, string(b))
		})
	}
}