diff --git a/.gitignore b/.gitignore index 4aa5e6e60..28dd2716d 100644 --- a/.gitignore +++ b/.gitignore @@ -23,6 +23,9 @@ elastic-package # IDEA .idea +# VSCode +.vscode + # Build directory /build diff --git a/README.md b/README.md index 224baca79..037bf5cc8 100644 --- a/README.md +++ b/README.md @@ -366,6 +366,29 @@ Use this command to export ingest pipelines with referenced pipelines from the E Use this command to download selected ingest pipelines and its referenced processor pipelines from Elasticsearch. Select data stream or the package root directories to download the pipelines. Pipelines are downloaded as is and will need adjustment to meet your package needs. +### `elastic-package filter [flags]` + +_Context: package_ + +This command gives you a list of all packages based on the given query. + +The command will search for packages in the working directory for default depth of 2 and +return the list of packages that match the given criteria. + +Use --change-directory to change the working directory and --depth to change the depth of the search. + +### `elastic-package foreach [flags] -- ` + +_Context: package_ + +Execute a command for each package matching the given filter criteria. + +This command combines filtering capabilities with command execution, allowing you to run +any elastic-package subcommand across multiple packages in a single operation. + +The command uses the same filter flags as the 'filter' command to select packages, +then executes the specified subcommand for each matched package. + ### `elastic-package format` _Context: package_ diff --git a/cmd/filter.go b/cmd/filter.go new file mode 100644 index 000000000..1bcaf2e4c --- /dev/null +++ b/cmd/filter.go @@ -0,0 +1,120 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package cmd + +import ( + "fmt" + "io" + "os" + + "github.com/spf13/cobra" + + "github.com/elastic/elastic-package/internal/cobraext" + "github.com/elastic/elastic-package/internal/filter" + "github.com/elastic/elastic-package/internal/packages" +) + +const filterLongDescription = `This command gives you a list of all packages based on the given query. + +The command will search for packages in the working directory for default depth of 2 and +return the list of packages that match the given criteria. 
+ +Use --change-directory to change the working directory and --depth to change the depth of the search.` + +const filterExample = `elastic-package filter --inputs tcp,udp --categories security --depth 3 --output json +elastic-package filter --packages 'cisco_*,fortinet_*' --output yaml +` + +func setupFilterCommand() *cobraext.Command { + cmd := &cobra.Command{ + Use: "filter [flags]", + Short: "filter integrations based on given flags", + Long: filterLongDescription, + Args: cobra.NoArgs, + RunE: filterCommandAction, + Example: filterExample, + } + + // add filter flags to the command (input, code owner, kibana version, categories) + filter.SetFilterFlags(cmd) + + // add the output package name and absolute path flags to the command + cmd.Flags().StringP(cobraext.FilterOutputFlagName, cobraext.FilterOutputFlagShorthand, "", cobraext.FilterOutputFlagDescription) + cmd.Flags().StringP(cobraext.FilterOutputInfoFlagName, "", cobraext.FilterOutputInfoFlagDefault, cobraext.FilterOutputInfoFlagDescription) + + return cobraext.NewCommand(cmd, cobraext.ContextPackage) +} + +func filterCommandAction(cmd *cobra.Command, args []string) error { + filtered, err := filterPackage(cmd) + if err != nil { + return fmt.Errorf("filtering packages failed: %w", err) + } + + outputFormatStr, err := cmd.Flags().GetString(cobraext.FilterOutputFlagName) + if err != nil { + return fmt.Errorf("getting output format flag failed: %w", err) + } + + outputInfoStr, err := cmd.Flags().GetString(cobraext.FilterOutputInfoFlagName) + if err != nil { + return fmt.Errorf("getting output info flag failed: %w", err) + } + + outputOptions, err := filter.NewOutputOptions(outputInfoStr, outputFormatStr) + if err != nil { + return fmt.Errorf("creating output options failed: %w", err) + } + + if err = printPkgList(filtered, outputOptions, os.Stdout); err != nil { + return fmt.Errorf("printing JSON failed: %w", err) + } + + return nil +} + +func filterPackage(cmd *cobra.Command) ([]packages.PackageDirNameAndManifest, error) { + depth, err := cmd.Flags().GetInt(cobraext.FilterDepthFlagName) + if err != nil { + return nil, fmt.Errorf("getting depth flag failed: %w", err) + } + + excludeDirs, err := cmd.Flags().GetString(cobraext.FilterExcludeDirFlagName) + if err != nil { + return nil, fmt.Errorf("getting exclude-dir flag failed: %w", err) + } + + filters := filter.NewFilterRegistry(depth, excludeDirs) + + if err := filters.Parse(cmd); err != nil { + return nil, fmt.Errorf("parsing filter options failed: %w", err) + } + + if err := filters.Validate(); err != nil { + return nil, fmt.Errorf("validating filter options failed: %w", err) + } + + currDir, err := os.Getwd() + if err != nil { + return nil, fmt.Errorf("getting current directory failed: %w", err) + } + filtered, errors := filters.Execute(currDir) + if errors != nil { + return nil, fmt.Errorf("filtering packages failed: %s", errors.Error()) + } + + return filtered, nil +} + +func printPkgList(pkgs []packages.PackageDirNameAndManifest, outputOptions *filter.OutputOptions, w io.Writer) error { + formatted, err := outputOptions.ApplyTo(pkgs) + if err != nil { + return fmt.Errorf("applying output format failed: %w", err) + } + + // write the formatted packages to the writer + _, err = io.WriteString(w, formatted+"\n") + return err +} diff --git a/cmd/foreach.go b/cmd/foreach.go new file mode 100644 index 000000000..5347c69db --- /dev/null +++ b/cmd/foreach.go @@ -0,0 +1,97 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package cmd + +import ( + "fmt" + "slices" + "strings" + + "github.com/spf13/cobra" + + "github.com/elastic/elastic-package/internal/cobraext" + "github.com/elastic/elastic-package/internal/filter" + "github.com/elastic/elastic-package/internal/logger" + "github.com/elastic/elastic-package/internal/multierror" +) + +const foreachLongDescription = `Execute a command for each package matching the given filter criteria. + +This command combines filtering capabilities with command execution, allowing you to run +any elastic-package subcommand across multiple packages in a single operation. + +The command uses the same filter flags as the 'filter' command to select packages, +then executes the specified subcommand for each matched package.` + +// getAllowedSubCommands returns the list of allowed subcommands for the foreach command. +func getAllowedSubCommands() []string { + return []string{ + "build", + "check", + "changelog", + "clean", + "format", + "install", + "lint", + "test", + "uninstall", + } +} + +func setupForeachCommand() *cobraext.Command { + cmd := &cobra.Command{ + Use: "foreach [flags] -- ", + Short: "Execute a command for filtered packages", + Long: foreachLongDescription, + Example: ` # Run system tests for packages with specific inputs + elastic-package foreach --input tcp,udp -- test system -g`, + RunE: foreachCommandAction, + Args: cobra.MinimumNArgs(1), + } + + // Add filter flags + filter.SetFilterFlags(cmd) + + return cobraext.NewCommand(cmd, cobraext.ContextPackage) +} + +func foreachCommandAction(cmd *cobra.Command, args []string) error { + if err := validateSubCommand(args[0]); err != nil { + return fmt.Errorf("validating sub command failed: %w", err) + } + + // reuse filterPackage from cmd/filter.go + filtered, err := filterPackage(cmd) + if err != nil { + return fmt.Errorf("filtering packages failed: %w", err) + } + + errors := multierror.Error{} + + for _, pkg := range filtered { + rootCmd := cmd.Root() + rootCmd.SetArgs(append(args, "--change-directory", pkg.Path)) + if err := rootCmd.Execute(); err != nil { + errors = append(errors, err) + } + } + + logger.Infof("Successfully executed command for %d packages", len(filtered)-len(errors)) + + if errors.Error() != "" { + logger.Errorf("Errors occurred for %d packages", len(errors)) + return fmt.Errorf("errors occurred while executing command for packages: \n%s", errors.Error()) + } + + return nil +} + +func validateSubCommand(subCommand string) error { + if !slices.Contains(getAllowedSubCommands(), subCommand) { + return fmt.Errorf("invalid subcommand: %s. 
Allowed subcommands are: [%s]", subCommand, strings.Join(getAllowedSubCommands(), ", ")) + } + + return nil +} diff --git a/cmd/root.go b/cmd/root.go index e449ff516..ae5ffe2d9 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -26,7 +26,9 @@ var commands = []*cobraext.Command{ setupDumpCommand(), setupEditCommand(), setupExportCommand(), + setupFilterCommand(), setupFormatCommand(), + setupForeachCommand(), setupInstallCommand(), setupLinksCommand(), setupLintCommand(), diff --git a/go.mod b/go.mod index 0ca4769ba..dbc04930b 100644 --- a/go.mod +++ b/go.mod @@ -21,6 +21,7 @@ require ( github.com/elastic/package-spec/v3 v3.5.2 github.com/fatih/color v1.18.0 github.com/go-viper/mapstructure/v2 v2.4.0 + github.com/gobwas/glob v0.2.3 github.com/google/go-cmp v0.7.0 github.com/google/go-github/v32 v32.1.0 github.com/google/go-querystring v1.1.0 @@ -34,6 +35,7 @@ require ( github.com/shirou/gopsutil/v3 v3.24.5 github.com/spf13/cobra v1.10.1 github.com/stretchr/testify v1.11.1 + go.yaml.in/yaml/v2 v2.4.2 go4.org/netipx v0.0.0-20231129151722-fdeea329fbba golang.org/x/tools v0.39.0 gopkg.in/dnaeon/go-vcr.v3 v3.2.0 @@ -157,7 +159,6 @@ require ( github.com/yuin/goldmark v1.7.13 // indirect github.com/yusufpapurcu/wmi v1.2.4 // indirect go.mongodb.org/mongo-driver v1.11.1 // indirect - go.yaml.in/yaml/v2 v2.4.2 // indirect go.yaml.in/yaml/v3 v3.0.4 // indirect golang.org/x/crypto v0.44.0 // indirect golang.org/x/exp/typeparams v0.0.0-20231108232855-2478ac86f678 // indirect diff --git a/go.sum b/go.sum index c3d7df23b..2d72224b0 100644 --- a/go.sum +++ b/go.sum @@ -144,6 +144,8 @@ github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1v github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= diff --git a/internal/cobraext/flags.go b/internal/cobraext/flags.go index 487c28aa1..f3ab28aad 100644 --- a/internal/cobraext/flags.go +++ b/internal/cobraext/flags.go @@ -133,8 +133,49 @@ const ( FailOnMissingFlagName = "fail-on-missing" FailOnMissingFlagDescription = "fail if tests are missing" - FailFastFlagName = "fail-fast" - FailFastFlagDescription = "fail immediately if any file requires updates (do not overwrite)" + FailFastFlagName = "fail-fast" + FailFastFlagDescription = "fail immediately if any file requires updates (do not overwrite)" + + FilterCategoriesFlagName = "categories" + FilterCategoriesFlagDescription = "integration categories to filter by (comma-separated values)" + + FilterCodeOwnerFlagName = "code-owners" + FilterCodeOwnerFlagDescription = "code owners to filter by (comma-separated values)" + + FilterDepthFlagName = "depth" + FilterDepthFlagDescription = "maximum depth to search for packages" + FilterDepthFlagDefault = 2 + FilterDepthFlagShorthand = "d" + + FilterExcludeDirFlagName = "exclude-dirs" + FilterExcludeDirFlagDescription = "comma-separated list of directories to exclude from search" + + FilterInputFlagName = "inputs" + 
FilterInputFlagDescription = "name of the inputs to filter by (comma-separated values)" + + FilterKibanaVersionFlagName = "kibana-version" + FilterKibanaVersionFlagDescription = "kibana version to filter by (semver)" + + FilterOutputFlagName = "output" + FilterOutputFlagDescription = "format of the output. Available options: json, yaml (leave empty for newline-separated list)" + FilterOutputFlagShorthand = "o" + + FilterOutputInfoFlagName = "output-info" + FilterOutputInfoFlagDescription = "output information about the packages. Available options: pkgname, dirname, absolute" + FilterOutputInfoFlagDefault = "dirname" + + FilterPackageDirNameFlagName = "package-dirs" + FilterPackageDirNameFlagDescription = "package directories to filter by (comma-separated values)" + + FilterPackagesFlagName = "packages" + FilterPackagesFlagDescription = "package names to filter by (comma-separated values)" + + FilterPackageTypeFlagName = "package-types" + FilterPackageTypeFlagDescription = "package types to filter by (comma-separated values)" + + FilterSpecVersionFlagName = "spec-version" + FilterSpecVersionFlagDescription = "Package spec version to filter by (semver)" + GenerateTestResultFlagName = "generate" GenerateTestResultFlagDescription = "generate test result file" diff --git a/internal/filter/category.go b/internal/filter/category.go new file mode 100644 index 000000000..5c170ca41 --- /dev/null +++ b/internal/filter/category.go @@ -0,0 +1,62 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package filter + +import ( + "github.com/spf13/cobra" + + "github.com/elastic/elastic-package/internal/cobraext" + "github.com/elastic/elastic-package/internal/packages" +) + +type CategoryFlag struct { + FilterFlagBase + + values []string +} + +func (f *CategoryFlag) Parse(cmd *cobra.Command) error { + category, err := cmd.Flags().GetString(cobraext.FilterCategoriesFlagName) + if err != nil { + return cobraext.FlagParsingError(err, cobraext.FilterCategoriesFlagName) + } + if category == "" { + return nil + } + + categories := splitAndTrim(category, ",") + f.values = categories + f.isApplied = true + return nil +} + +func (f *CategoryFlag) Validate() error { + return nil +} + +func (f *CategoryFlag) Matches(dirName string, manifest *packages.PackageManifest) bool { + return hasAnyMatch(f.values, manifest.Categories) +} + +func (f *CategoryFlag) ApplyTo(pkgs []packages.PackageDirNameAndManifest) ([]packages.PackageDirNameAndManifest, error) { + filtered := make([]packages.PackageDirNameAndManifest, 0, len(pkgs)) + for _, pkg := range pkgs { + if f.Matches(pkg.DirName, pkg.Manifest) { + filtered = append(filtered, pkg) + } + } + return filtered, nil +} + +func initCategoryFlag() *CategoryFlag { + return &CategoryFlag{ + FilterFlagBase: FilterFlagBase{ + name: cobraext.FilterCategoriesFlagName, + description: cobraext.FilterCategoriesFlagDescription, + shorthand: "", + defaultValue: "", + }, + } +} diff --git a/internal/filter/codeowner.go b/internal/filter/codeowner.go new file mode 100644 index 000000000..d80b98297 --- /dev/null +++ b/internal/filter/codeowner.go @@ -0,0 +1,73 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package filter + +import ( + "fmt" + + "github.com/spf13/cobra" + + "github.com/elastic/elastic-package/internal/cobraext" + "github.com/elastic/elastic-package/internal/packages" + "github.com/elastic/elastic-package/internal/tui" +) + +type CodeOwnerFlag struct { + FilterFlagBase + values []string +} + +func (f *CodeOwnerFlag) Parse(cmd *cobra.Command) error { + codeOwners, err := cmd.Flags().GetString(cobraext.FilterCodeOwnerFlagName) + if err != nil { + return cobraext.FlagParsingError(err, cobraext.FilterCodeOwnerFlagName) + } + if codeOwners == "" { + return nil + } + + f.values = splitAndTrim(codeOwners, ",") + f.isApplied = true + return nil +} + +func (f *CodeOwnerFlag) Validate() error { + validator := tui.Validator{Cwd: "."} + + if f.values != nil { + for _, value := range f.values { + if err := validator.GithubOwner(value); err != nil { + return fmt.Errorf("invalid code owner: %s: %w", value, err) + } + } + } + + return nil +} + +func (f *CodeOwnerFlag) Matches(dirName string, manifest *packages.PackageManifest) bool { + return hasAnyMatch(f.values, []string{manifest.Owner.Github}) +} + +func (f *CodeOwnerFlag) ApplyTo(pkgs []packages.PackageDirNameAndManifest) ([]packages.PackageDirNameAndManifest, error) { + filtered := make([]packages.PackageDirNameAndManifest, 0, len(pkgs)) + for _, pkg := range pkgs { + if f.Matches(pkg.DirName, pkg.Manifest) { + filtered = append(filtered, pkg) + } + } + return filtered, nil +} + +func initCodeOwnerFlag() *CodeOwnerFlag { + return &CodeOwnerFlag{ + FilterFlagBase: FilterFlagBase{ + name: cobraext.FilterCodeOwnerFlagName, + description: cobraext.FilterCodeOwnerFlagDescription, + shorthand: "", + defaultValue: "", + }, + } +} diff --git a/internal/filter/filters_test.go b/internal/filter/filters_test.go new file mode 100644 index 000000000..5a528fb28 --- /dev/null +++ b/internal/filter/filters_test.go @@ -0,0 +1,294 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package filter + +import ( + "path/filepath" + "strings" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/elastic/elastic-package/internal/cobraext" + "github.com/elastic/elastic-package/internal/packages" +) + +func loadTestPackages(t *testing.T) []packages.PackageDirNameAndManifest { + t.Helper() + testPackagesPath, err := filepath.Abs("../../test/packages") + require.NoError(t, err) + + pkgs, err := packages.ReadAllPackageManifestsFromRepo(testPackagesPath, cobraext.FilterDepthFlagDefault, "") + require.NoError(t, err) + require.NotEmpty(t, pkgs, "no packages found in test/packages") + + return pkgs +} + +func parseFlag(t *testing.T, f Filter, flagName, value string) { + t.Helper() + cmd := &cobra.Command{} + f.Register(cmd) + err := cmd.Flags().Set(flagName, value) + require.NoError(t, err) + err = f.Parse(cmd) + require.NoError(t, err) +} + +func TestCategoryFlag_Matches(t *testing.T) { + pkgs := loadTestPackages(t) + + // Find a real category + var realCategory string + if len(pkgs[0].Manifest.Categories) > 0 { + realCategory = pkgs[0].Manifest.Categories[0] + } else { + realCategory = "security" // Fallback + } + + tests := []struct { + name string + categories []string + wantMatch bool + }{ + {"match existing", []string{realCategory}, true}, + {"no match random", []string{"random_category_xyz"}, false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + f := initCategoryFlag() + parseFlag(t, f, cobraext.FilterCategoriesFlagName, strings.Join(tt.categories, ",")) + + matched := false + for _, pkg := range pkgs { + if f.Matches(pkg.DirName, pkg.Manifest) { + matched = true + break + } + } + assert.Equal(t, tt.wantMatch, matched, "Category match expectation failed for %v", tt.categories) + }) + } +} + +func TestInputFlag_Matches(t *testing.T) { + pkgs := loadTestPackages(t) + + // Find a real input (simplified extraction) + realInput := "logfile" // Common input, fallback + for _, pkg := range pkgs { + for _, pt := range pkg.Manifest.PolicyTemplates { + if pt.Input != "" { + realInput = pt.Input + break + } + for _, inp := range pt.Inputs { + if inp.Type != "" { + realInput = inp.Type + break + } + } + } + } + + tests := []struct { + name string + inputs []string + wantMatch bool + }{ + {"match existing", []string{realInput}, true}, + {"no match random", []string{"random_input_xyz"}, false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + f := initInputFlag() + parseFlag(t, f, cobraext.FilterInputFlagName, strings.Join(tt.inputs, ",")) + + matched := false + for _, pkg := range pkgs { + if f.Matches(pkg.DirName, pkg.Manifest) { + matched = true + break + } + } + assert.Equal(t, tt.wantMatch, matched, "Input match expectation failed for %v", tt.inputs) + }) + } +} + +func TestCodeOwnerFlag_Matches(t *testing.T) { + pkgs := loadTestPackages(t) + + realOwner := pkgs[0].Manifest.Owner.Github + + tests := []struct { + name string + owner []string + wantMatch bool + }{ + {"match existing", []string{realOwner}, true}, + {"no match random", []string{"random_owner_xyz"}, false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + f := initCodeOwnerFlag() + parseFlag(t, f, cobraext.FilterCodeOwnerFlagName, strings.Join(tt.owner, ",")) + + matched := false + for _, pkg := range pkgs { + if f.Matches(pkg.DirName, pkg.Manifest) { + matched = true + break + } + } + assert.Equal(t, tt.wantMatch, matched, "Owner match expectation failed 
for %v", tt.owner) + }) + } +} + +func TestPackageDirNameFlag_Matches(t *testing.T) { + pkgs := loadTestPackages(t) + realPkg := pkgs[0] + + tests := []struct { + name string + dirNames []string + wantMatch bool + }{ + {"match existing", []string{realPkg.DirName}, true}, + {"no match random", []string{"random_dirname_xyz"}, false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + f := initPackageDirNameFlag() + parseFlag(t, f, cobraext.FilterPackageDirNameFlagName, strings.Join(tt.dirNames, ",")) + + matched := false + for _, pkg := range pkgs { + if f.Matches(pkg.DirName, pkg.Manifest) { + matched = true + break + } + } + assert.Equal(t, tt.wantMatch, matched, "DirName match expectation failed for %v", tt.dirNames) + }) + } +} + +func TestPackageNameFlag_Matches(t *testing.T) { + pkgs := loadTestPackages(t) + realPkg := pkgs[0] + + tests := []struct { + name string + pkgNames []string + wantMatch bool + }{ + {"match existing", []string{realPkg.Manifest.Name}, true}, + {"no match random", []string{"random_pkgname_xyz"}, false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + f := initPackageNameFlag() + parseFlag(t, f, cobraext.FilterPackagesFlagName, strings.Join(tt.pkgNames, ",")) + + matched := false + for _, pkg := range pkgs { + if f.Matches(pkg.DirName, pkg.Manifest) { + matched = true + break + } + } + assert.Equal(t, tt.wantMatch, matched, "PackageName match expectation failed for %v", tt.pkgNames) + }) + } +} + +func TestPackageTypeFlag_Matches(t *testing.T) { + pkgs := loadTestPackages(t) + realType := pkgs[0].Manifest.Type + + tests := []struct { + name string + pkgTypes []string + wantMatch bool + }{ + {"match existing", []string{realType}, true}, + {"no match non existing", []string{"non_existing_type"}, false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + f := initPackageTypeFlag() + parseFlag(t, f, cobraext.FilterPackageTypeFlagName, strings.Join(tt.pkgTypes, ",")) + + matched := false + for _, pkg := range pkgs { + if f.Matches(pkg.DirName, pkg.Manifest) { + matched = true + break + } + } + assert.Equal(t, tt.wantMatch, matched, "PackageType match expectation failed for %v", tt.pkgTypes) + }) + } +} + +func TestSpecVersionFlag_Matches(t *testing.T) { + pkgs := []packages.PackageDirNameAndManifest{ + { + DirName: "test-pkg-1", + Manifest: &packages.PackageManifest{ + SpecVersion: "3.2.1", + }, + }, + { + DirName: "test-pkg-2", + Manifest: &packages.PackageManifest{ + SpecVersion: "3.2.2", + }, + }, + } + + tests := []struct { + name string + specVersions []string + wantMatch bool + }{ + {"match existing", []string{"3.2.1"}, true}, + {"no match random", []string{"1.1.0"}, false}, + {"match operator", []string{">= 3.0.0"}, true}, + {"no match operator", []string{"< 3.0.0"}, false}, + {"match multiple", []string{">= 3.0.0", "<= 3.2.2"}, true}, + {"no match multiple", []string{"<= 3.0.0", "> 3.2.2"}, false}, + {"match minor", []string{"3.x"}, true}, + {"no match minor", []string{"3.1.x"}, false}, + {"match major", []string{"3.x"}, true}, + {"no match major", []string{"2.x"}, false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + f := initSpecVersionFlag() + parseFlag(t, f, cobraext.FilterSpecVersionFlagName, strings.Join(tt.specVersions, ",")) + + matched := false + for _, pkg := range pkgs { + if f.Matches(pkg.DirName, pkg.Manifest) { + matched = true + break + } + } + assert.Equal(t, tt.wantMatch, matched, "SpecVersion match expectation failed for %v", 
tt.specVersions) + }) + } +} diff --git a/internal/filter/input.go b/internal/filter/input.go new file mode 100644 index 000000000..d73f7c3d9 --- /dev/null +++ b/internal/filter/input.go @@ -0,0 +1,69 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package filter + +import ( + "github.com/spf13/cobra" + + "github.com/elastic/elastic-package/internal/cobraext" + "github.com/elastic/elastic-package/internal/packages" +) + +type InputFlag struct { + FilterFlagBase + + // flag specific fields + values []string +} + +func (f *InputFlag) Parse(cmd *cobra.Command) error { + input, err := cmd.Flags().GetString(cobraext.FilterInputFlagName) + if err != nil { + return cobraext.FlagParsingError(err, cobraext.FilterInputFlagName) + } + if input == "" { + return nil + } + + f.values = splitAndTrim(input, ",") + f.isApplied = true + return nil +} + +func (f *InputFlag) Validate() error { + return nil +} + +func (f *InputFlag) Matches(dirName string, manifest *packages.PackageManifest) bool { + if f.values != nil { + inputs := extractInputs(manifest) + if !hasAnyMatch(f.values, inputs) { + return false + } + } + return true +} + +func (f *InputFlag) ApplyTo(pkgs []packages.PackageDirNameAndManifest) ([]packages.PackageDirNameAndManifest, error) { + filtered := make([]packages.PackageDirNameAndManifest, 0, len(pkgs)) + + for _, pkg := range pkgs { + if f.Matches(pkg.DirName, pkg.Manifest) { + filtered = append(filtered, pkg) + } + } + return filtered, nil +} + +func initInputFlag() *InputFlag { + return &InputFlag{ + FilterFlagBase: FilterFlagBase{ + name: cobraext.FilterInputFlagName, + description: cobraext.FilterInputFlagDescription, + shorthand: "", + defaultValue: "", + }, + } +} diff --git a/internal/filter/packagedirname.go b/internal/filter/packagedirname.go new file mode 100644 index 000000000..32f4a640e --- /dev/null +++ b/internal/filter/packagedirname.go @@ -0,0 +1,77 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package filter + +import ( + "fmt" + + "github.com/gobwas/glob" + "github.com/spf13/cobra" + + "github.com/elastic/elastic-package/internal/cobraext" + "github.com/elastic/elastic-package/internal/packages" +) + +type PackageDirNameFlag struct { + FilterFlagBase + + patterns []glob.Glob +} + +func (f *PackageDirNameFlag) Parse(cmd *cobra.Command) error { + packageDirNamePatterns, err := cmd.Flags().GetString(cobraext.FilterPackageDirNameFlagName) + if err != nil { + return cobraext.FlagParsingError(err, cobraext.FilterPackageDirNameFlagName) + } + + patterns := splitAndTrim(packageDirNamePatterns, ",") + for _, patternString := range patterns { + pattern, err := glob.Compile(patternString) + if err != nil { + return fmt.Errorf("invalid package dir name pattern: %s: %w", patternString, err) + } + f.patterns = append(f.patterns, pattern) + } + + if len(f.patterns) > 0 { + f.isApplied = true + } + + return nil +} + +func (f *PackageDirNameFlag) Validate() error { + return nil +} + +func (f *PackageDirNameFlag) Matches(dirName string, manifest *packages.PackageManifest) bool { + for _, pattern := range f.patterns { + if pattern.Match(dirName) { + return true + } + } + return false +} + +func (f *PackageDirNameFlag) ApplyTo(pkgs []packages.PackageDirNameAndManifest) ([]packages.PackageDirNameAndManifest, error) { + filtered := make([]packages.PackageDirNameAndManifest, 0, len(pkgs)) + for _, pkg := range pkgs { + if f.Matches(pkg.DirName, pkg.Manifest) { + filtered = append(filtered, pkg) + } + } + return filtered, nil +} + +func initPackageDirNameFlag() *PackageDirNameFlag { + return &PackageDirNameFlag{ + FilterFlagBase: FilterFlagBase{ + name: cobraext.FilterPackageDirNameFlagName, + description: cobraext.FilterPackageDirNameFlagDescription, + shorthand: "", + defaultValue: "", + }, + } +} diff --git a/internal/filter/packagename.go b/internal/filter/packagename.go new file mode 100644 index 000000000..63a415f29 --- /dev/null +++ b/internal/filter/packagename.go @@ -0,0 +1,77 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package filter + +import ( + "fmt" + + "github.com/gobwas/glob" + "github.com/spf13/cobra" + + "github.com/elastic/elastic-package/internal/cobraext" + "github.com/elastic/elastic-package/internal/packages" +) + +type PackageNameFlag struct { + FilterFlagBase + + patterns []glob.Glob +} + +func (f *PackageNameFlag) Parse(cmd *cobra.Command) error { + packageNamePatterns, err := cmd.Flags().GetString(cobraext.FilterPackagesFlagName) + if err != nil { + return cobraext.FlagParsingError(err, cobraext.FilterPackagesFlagName) + } + + patterns := splitAndTrim(packageNamePatterns, ",") + for _, patternString := range patterns { + pattern, err := glob.Compile(patternString) + if err != nil { + return fmt.Errorf("invalid package name pattern: %s: %w", patternString, err) + } + f.patterns = append(f.patterns, pattern) + } + + if len(f.patterns) > 0 { + f.isApplied = true + } + + return nil +} + +func (f *PackageNameFlag) Validate() error { + return nil +} + +func (f *PackageNameFlag) Matches(dirName string, manifest *packages.PackageManifest) bool { + for _, pattern := range f.patterns { + if pattern.Match(manifest.Name) { + return true + } + } + return false +} + +func (f *PackageNameFlag) ApplyTo(pkgs []packages.PackageDirNameAndManifest) ([]packages.PackageDirNameAndManifest, error) { + filtered := make([]packages.PackageDirNameAndManifest, 0, len(pkgs)) + for _, pkg := range pkgs { + if f.Matches(pkg.DirName, pkg.Manifest) { + filtered = append(filtered, pkg) + } + } + return filtered, nil +} + +func initPackageNameFlag() *PackageNameFlag { + return &PackageNameFlag{ + FilterFlagBase: FilterFlagBase{ + name: cobraext.FilterPackagesFlagName, + description: cobraext.FilterPackagesFlagDescription, + shorthand: "", + defaultValue: "", + }, + } +} diff --git a/internal/filter/packagetype.go b/internal/filter/packagetype.go new file mode 100644 index 000000000..d1945a4b2 --- /dev/null +++ b/internal/filter/packagetype.go @@ -0,0 +1,61 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package filter + +import ( + "github.com/spf13/cobra" + + "github.com/elastic/elastic-package/internal/cobraext" + "github.com/elastic/elastic-package/internal/packages" +) + +type PackageTypeFlag struct { + FilterFlagBase + + // flag specific fields + values []string +} + +func (f *PackageTypeFlag) Parse(cmd *cobra.Command) error { + packageTypes, err := cmd.Flags().GetString(cobraext.FilterPackageTypeFlagName) + if err != nil { + return cobraext.FlagParsingError(err, cobraext.FilterPackageTypeFlagName) + } + if packageTypes == "" { + return nil + } + f.values = splitAndTrim(packageTypes, ",") + f.isApplied = true + return nil +} + +func (f *PackageTypeFlag) Validate() error { + return nil +} + +func (f *PackageTypeFlag) Matches(dirName string, manifest *packages.PackageManifest) bool { + return hasAnyMatch(f.values, []string{manifest.Type}) +} + +func (f *PackageTypeFlag) ApplyTo(pkgs []packages.PackageDirNameAndManifest) ([]packages.PackageDirNameAndManifest, error) { + filtered := make([]packages.PackageDirNameAndManifest, 0, len(pkgs)) + for _, pkg := range pkgs { + if f.Matches(pkg.DirName, pkg.Manifest) { + filtered = append(filtered, pkg) + } + } + return filtered, nil +} + +func initPackageTypeFlag() *PackageTypeFlag { + return &PackageTypeFlag{ + FilterFlagBase: FilterFlagBase{ + name: cobraext.FilterPackageTypeFlagName, + description: cobraext.FilterPackageTypeFlagDescription, + shorthand: "", + defaultValue: "", + }, + } +} diff --git a/internal/filter/registry.go b/internal/filter/registry.go new file mode 100644 index 000000000..90b210b3d --- /dev/null +++ b/internal/filter/registry.go @@ -0,0 +1,104 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package filter + +import ( + "fmt" + + "github.com/spf13/cobra" + + "github.com/elastic/elastic-package/internal/cobraext" + "github.com/elastic/elastic-package/internal/logger" + "github.com/elastic/elastic-package/internal/multierror" + "github.com/elastic/elastic-package/internal/packages" +) + +func getFilters() []Filter { + return []Filter{ + initCategoryFlag(), + initCodeOwnerFlag(), + initInputFlag(), + initPackageDirNameFlag(), + initPackageNameFlag(), + initPackageTypeFlag(), + initSpecVersionFlag(), + } +} + +// SetFilterFlags registers all filter flags with the given command. +func SetFilterFlags(cmd *cobra.Command) { + cmd.Flags().IntP(cobraext.FilterDepthFlagName, cobraext.FilterDepthFlagShorthand, cobraext.FilterDepthFlagDefault, cobraext.FilterDepthFlagDescription) + cmd.Flags().StringP(cobraext.FilterExcludeDirFlagName, "", "", cobraext.FilterExcludeDirFlagDescription) + + for _, filter := range getFilters() { + filter.Register(cmd) + } +} + +// FilterRegistry manages a collection of filters for package filtering. +type FilterRegistry struct { + filters []Filter + depth int + excludeDirs string +} + +// NewFilterRegistry creates a new FilterRegistry instance. 
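+//
+// Illustrative sketch of the intended flow (as wired up in cmd/filter.go in this change;
+// workingDir stands for the caller's working directory, e.g. the result of os.Getwd()):
+//
+//	filters := NewFilterRegistry(depth, excludeDirs)
+//	if err := filters.Parse(cmd); err != nil { /* handle parse error */ }
+//	if err := filters.Validate(); err != nil { /* handle validation error */ }
+//	filtered, errs := filters.Execute(workingDir)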
+func NewFilterRegistry(depth int, excludeDirs string) *FilterRegistry { + return &FilterRegistry{ + filters: []Filter{}, + depth: depth, + excludeDirs: excludeDirs, + } +} + +func (r *FilterRegistry) Parse(cmd *cobra.Command) error { + errs := multierror.Error{} + for _, filter := range getFilters() { + if err := filter.Parse(cmd); err != nil { + errs = append(errs, err) + } + + if filter.IsApplied() { + r.filters = append(r.filters, filter) + } + } + + if errs.Error() != "" { + return fmt.Errorf("error parsing filter options: %s", errs.Error()) + } + + return nil +} + +func (r *FilterRegistry) Validate() error { + for _, filter := range r.filters { + if err := filter.Validate(); err != nil { + return err + } + } + return nil +} + +func (r *FilterRegistry) Execute(currentDir string) (filtered []packages.PackageDirNameAndManifest, errors multierror.Error) { + pkgs, err := packages.ReadAllPackageManifestsFromRepo(currentDir, r.depth, r.excludeDirs) + if err != nil { + return nil, multierror.Error{err} + } + + filtered = pkgs + for _, filter := range r.filters { + filtered, err = filter.ApplyTo(filtered) + if err != nil { + errors = append(errors, err) + } + + if len(filtered) == 0 { + break + } + } + + logger.Infof("Found %d matching package(s)\n", len(filtered)) + return filtered, errors +} diff --git a/internal/filter/registry_test.go b/internal/filter/registry_test.go new file mode 100644 index 000000000..ab8f7a1f3 --- /dev/null +++ b/internal/filter/registry_test.go @@ -0,0 +1,74 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package filter + +import ( + "path/filepath" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/elastic/elastic-package/internal/cobraext" +) + +func TestFilterRegistry_Parse(t *testing.T) { + + t.Run("parse valid flags", func(t *testing.T) { + cmd := &cobra.Command{} + SetFilterFlags(cmd) + cmd.Flags().Set(cobraext.FilterCategoriesFlagName, "security") + cmd.Flags().Set(cobraext.FilterInputFlagName, "tcp") + + registry := NewFilterRegistry(2, "") + err := registry.Parse(cmd) + require.NoError(t, err) + assert.Len(t, registry.filters, 2) + }) + + t.Run("parse no flags", func(t *testing.T) { + cmd := &cobra.Command{} + SetFilterFlags(cmd) + + registry := NewFilterRegistry(2, "") + err := registry.Parse(cmd) + require.NoError(t, err) + assert.Empty(t, registry.filters) + }) +} + +func TestFilterRegistry_Validate(t *testing.T) { + t.Run("validate valid filters", func(t *testing.T) { + registry := NewFilterRegistry(2, "") + err := registry.Validate() + assert.NoError(t, err) + }) +} + +func TestFilterRegistry_Execute(t *testing.T) { + // Use real test packages for execution test + testPackagesPath, err := filepath.Abs("../../test/packages") + require.NoError(t, err) + + categoryFlag := initCategoryFlag() + categoryFlag.values = []string{"security"} + + t.Run("execute with real packages", func(t *testing.T) { + registry := NewFilterRegistry(3, "") + + registry.filters = append(registry.filters, categoryFlag) + + filtered, errors := registry.Execute(testPackagesPath) + + // multierror.Error is empty + require.Empty(t, errors) + require.NotEmpty(t, filtered) + + for _, pkg := range filtered { + assert.Contains(t, pkg.Manifest.Categories, "security") + } + }) +} diff --git 
a/internal/filter/specversion.go b/internal/filter/specversion.go new file mode 100644 index 000000000..c88cc70e6 --- /dev/null +++ b/internal/filter/specversion.go @@ -0,0 +1,75 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package filter + +import ( + "fmt" + + "github.com/Masterminds/semver/v3" + "github.com/spf13/cobra" + + "github.com/elastic/elastic-package/internal/cobraext" + "github.com/elastic/elastic-package/internal/packages" +) + +type SpecVersionFlag struct { + FilterFlagBase + + // package spec version constraint + constraints *semver.Constraints +} + +func (f *SpecVersionFlag) Parse(cmd *cobra.Command) error { + specVersion, err := cmd.Flags().GetString(cobraext.FilterSpecVersionFlagName) + if err != nil { + return cobraext.FlagParsingError(err, cobraext.FilterSpecVersionFlagName) + } + if specVersion == "" { + return nil + } + + f.constraints, err = semver.NewConstraint(specVersion) + if err != nil { + return fmt.Errorf("invalid spec version: %s: %w", specVersion, err) + } + + f.isApplied = true + return nil +} + +func (f *SpecVersionFlag) Validate() error { + // no validation needed for this flag + // checks are done in Parse method + return nil +} + +func (f *SpecVersionFlag) Matches(dirName string, manifest *packages.PackageManifest) bool { + pkgVersion, err := semver.NewVersion(manifest.SpecVersion) + if err != nil { + return false + } + return f.constraints.Check(pkgVersion) +} + +func (f *SpecVersionFlag) ApplyTo(pkgs []packages.PackageDirNameAndManifest) ([]packages.PackageDirNameAndManifest, error) { + filtered := make([]packages.PackageDirNameAndManifest, 0, len(pkgs)) + for _, pkg := range pkgs { + if f.Matches(pkg.DirName, pkg.Manifest) { + filtered = append(filtered, pkg) + } + } + return filtered, nil +} + +func initSpecVersionFlag() *SpecVersionFlag { + return &SpecVersionFlag{ + FilterFlagBase: FilterFlagBase{ + name: cobraext.FilterSpecVersionFlagName, + description: cobraext.FilterSpecVersionFlagDescription, + shorthand: "", + defaultValue: "", + }, + } +} diff --git a/internal/filter/type.go b/internal/filter/type.go new file mode 100644 index 000000000..646f72d2c --- /dev/null +++ b/internal/filter/type.go @@ -0,0 +1,148 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package filter + +import ( + "encoding/json" + "fmt" + "slices" + "strings" + + "github.com/spf13/cobra" + "go.yaml.in/yaml/v2" + + "github.com/elastic/elastic-package/internal/packages" +) + +// OutputOptions handles both what information to display and how to format it. +type OutputOptions struct { + infoType string // "pkgname", "dirname", "absolute" + format string // "json", "yaml", "" +} + +// NewOutputOptions creates a new OutputOptions from string parameters. 
+func NewOutputOptions(infoType, format string) (*OutputOptions, error) { + cfg := &OutputOptions{ + infoType: infoType, + format: format, + } + if err := cfg.validate(); err != nil { + return nil, err + } + return cfg, nil +} + +func (o *OutputOptions) validate() error { + validInfo := []string{"pkgname", "dirname", "absolute"} + validFormats := []string{"json", "yaml", ""} + + if !slices.Contains(validInfo, o.infoType) { + return fmt.Errorf("invalid output info type: %s (valid: pkgname, dirname, absolute)", o.infoType) + } + if !slices.Contains(validFormats, o.format) { + return fmt.Errorf("invalid output format: %s (valid: json, yaml, or empty)", o.format) + } + + return nil +} + +// ApplyTo applies the output configuration to packages and returns formatted output. +func (o *OutputOptions) ApplyTo(pkgs []packages.PackageDirNameAndManifest) (string, error) { + if len(pkgs) == 0 { + return "", nil + } + + values, err := o.extractInfo(pkgs) + if err != nil { + return "", fmt.Errorf("extracting info failed: %w", err) + } + + // Format output + return o.formatOutput(values) +} + +func (o *OutputOptions) extractInfo(pkgs []packages.PackageDirNameAndManifest) ([]string, error) { + + // Extract information + values := make([]string, 0, len(pkgs)) + for _, pkg := range pkgs { + var val string + switch o.infoType { + case "pkgname": + val = pkg.Manifest.Name + case "dirname": + val = pkg.DirName + case "absolute": + val = pkg.Path + } + values = append(values, val) + } + + // Sort for consistent output + slices.Sort(values) + + return values, nil +} + +func (o *OutputOptions) formatOutput(values []string) (string, error) { + switch o.format { + case "": + return strings.Join(values, "\n"), nil + case "json": + data, err := json.Marshal(values) + if err != nil { + return "", fmt.Errorf("failed to marshal to JSON: %w", err) + } + return string(data), nil + case "yaml": + data, err := yaml.Marshal(values) + if err != nil { + return "", fmt.Errorf("failed to marshal to YAML: %w", err) + } + return string(data), nil + default: + return "", fmt.Errorf("unsupported format: %s", o.format) + } +} + +// FilterFlag defines the basic interface for filter flags. +type FilterFlag interface { + String() string + Register(cmd *cobra.Command) + IsApplied() bool +} + +// Filter extends FilterFlag with filtering capabilities. +// It defines the interface for filtering packages based on specific criteria. +type Filter interface { + FilterFlag + Parse(cmd *cobra.Command) error + Validate() error + ApplyTo(pkgs []packages.PackageDirNameAndManifest) ([]packages.PackageDirNameAndManifest, error) + // Matches checks if a package matches the filter criteria. + // dirName is the directory name of the package in package root. + Matches(dirName string, manifest *packages.PackageManifest) bool +} + +// FilterFlagBase provides common functionality for filter flags. 
+type FilterFlagBase struct { + name string + description string + shorthand string + defaultValue string + isApplied bool +} + +func (f *FilterFlagBase) String() string { + return fmt.Sprintf("name=%s defaultValue=%s applied=%v", f.name, f.defaultValue, f.isApplied) +} + +func (f *FilterFlagBase) Register(cmd *cobra.Command) { + cmd.Flags().StringP(f.name, f.shorthand, f.defaultValue, f.description) +} + +func (f *FilterFlagBase) IsApplied() bool { + return f.isApplied +} diff --git a/internal/filter/type_test.go b/internal/filter/type_test.go new file mode 100644 index 000000000..aab88b633 --- /dev/null +++ b/internal/filter/type_test.go @@ -0,0 +1,97 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package filter + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/elastic/elastic-package/internal/packages" +) + +func TestOutputOptions_NewOutputOptions(t *testing.T) { + tests := []struct { + name string + infoType string + format string + wantErr bool + }{ + {"valid defaults", "pkgname", "", false}, + {"valid json", "dirname", "json", false}, + {"valid yaml", "absolute", "yaml", false}, + {"invalid info type", "invalid", "", true}, + {"invalid format", "pkgname", "invalid", true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := NewOutputOptions(tt.infoType, tt.format) + if tt.wantErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestOutputOptions_ApplyTo(t *testing.T) { + pkgs := []packages.PackageDirNameAndManifest{ + { + DirName: "package1", + Path: "/path/to/package1", + Manifest: &packages.PackageManifest{ + Name: "package_one", + }, + }, + { + DirName: "package2", + Path: "/path/to/package2", + Manifest: &packages.PackageManifest{ + Name: "package_two", + }, + }, + } + + t.Run("pkgname output", func(t *testing.T) { + opts, _ := NewOutputOptions("pkgname", "") + out, err := opts.ApplyTo(pkgs) + require.NoError(t, err) + assert.Contains(t, out, "package_one") + assert.Contains(t, out, "package_two") + }) + + t.Run("dirname output", func(t *testing.T) { + opts, _ := NewOutputOptions("dirname", "") + out, err := opts.ApplyTo(pkgs) + require.NoError(t, err) + assert.Contains(t, out, "package1") + assert.Contains(t, out, "package2") + }) + + t.Run("json format", func(t *testing.T) { + opts, _ := NewOutputOptions("pkgname", "json") + out, err := opts.ApplyTo(pkgs) + require.NoError(t, err) + assert.Contains(t, out, `["package_one","package_two"]`) + }) + + t.Run("yaml format", func(t *testing.T) { + opts, _ := NewOutputOptions("pkgname", "yaml") + out, err := opts.ApplyTo(pkgs) + require.NoError(t, err) + assert.Contains(t, out, "- package_one\n- package_two") + }) + + t.Run("absolute format", func(t *testing.T) { + opts, _ := NewOutputOptions("absolute", "") + out, err := opts.ApplyTo(pkgs) + require.NoError(t, err) + assert.Contains(t, out, "/path/to/package1") + assert.Contains(t, out, "/path/to/package2") + }) +} diff --git a/internal/filter/utils.go b/internal/filter/utils.go new file mode 100644 index 000000000..d1b416a01 --- /dev/null +++ b/internal/filter/utils.go @@ -0,0 +1,64 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package filter + +import ( + "slices" + "strings" + + "github.com/elastic/elastic-package/internal/packages" +) + +// splitAndTrim splits a string by delimiter and trims whitespace from each element +func splitAndTrim(s, delimiter string) []string { + if s == "" { + return nil + } + parts := strings.Split(s, delimiter) + result := make([]string, 0, len(parts)) + for _, part := range parts { + trimmed := strings.TrimSpace(part) + if trimmed != "" { + result = append(result, trimmed) + } + } + return result +} + +// hasAnyMatch checks if any item in the items slice exists in the filters slice +func hasAnyMatch(filters []string, items []string) bool { + if len(filters) == 0 { + return true + } + + for _, item := range items { + if slices.Contains(filters, item) { + return true + } + } + + return false +} + +// extractInputs extracts all input types from package policy templates +func extractInputs(manifest *packages.PackageManifest) []string { + uniqueInputs := make(map[string]struct{}) + for _, policyTemplate := range manifest.PolicyTemplates { + if policyTemplate.Input != "" { + uniqueInputs[policyTemplate.Input] = struct{}{} + } + + for _, input := range policyTemplate.Inputs { + uniqueInputs[input.Type] = struct{}{} + } + } + + inputs := make([]string, 0, len(uniqueInputs)) + for input := range uniqueInputs { + inputs = append(inputs, input) + } + + return inputs +} diff --git a/internal/filter/utils_test.go b/internal/filter/utils_test.go new file mode 100644 index 000000000..22ae1fc8c --- /dev/null +++ b/internal/filter/utils_test.go @@ -0,0 +1,70 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package filter + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/elastic/elastic-package/internal/packages" +) + +func TestSplitAndTrim(t *testing.T) { + tests := []struct { + name string + input string + delimiter string + want []string + }{ + {"simple split", "a,b,c", ",", []string{"a", "b", "c"}}, + {"with spaces", " a , b , c ", ",", []string{"a", "b", "c"}}, + {"empty string", "", ",", nil}, + {"only delimiters", ",,", ",", []string{}}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := splitAndTrim(tt.input, tt.delimiter) + assert.Equal(t, tt.want, got) + }) + } +} + +func TestHasAnyMatch(t *testing.T) { + tests := []struct { + name string + filters []string + items []string + want bool + }{ + {"match found", []string{"a"}, []string{"a", "b"}, true}, + {"no match", []string{"c"}, []string{"a", "b"}, false}, + {"empty filters (match all)", []string{}, []string{"a", "b"}, true}, + {"empty items", []string{"a"}, []string{}, false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := hasAnyMatch(tt.filters, tt.items) + assert.Equal(t, tt.want, got) + }) + } +} + +func TestExtractInputs(t *testing.T) { + manifest := &packages.PackageManifest{ + PolicyTemplates: []packages.PolicyTemplate{ + {Input: "input1"}, + {Inputs: []packages.Input{{Type: "input2"}, {Type: "input3"}}}, + }, + } + + got := extractInputs(manifest) + assert.Contains(t, got, "input1") + assert.Contains(t, got, "input2") + assert.Contains(t, got, "input3") + assert.Len(t, got, 3) +} diff --git a/internal/packages/packages.go b/internal/packages/packages.go index 1cdd15710..5f206487f 100644 --- a/internal/packages/packages.go +++ b/internal/packages/packages.go @@ -21,6 +21,8 @@ import ( "github.com/elastic/go-ucfg" "github.com/elastic/go-ucfg/yaml" + + "github.com/elastic/elastic-package/internal/logger" ) const ( @@ -201,6 +203,12 @@ type PackageManifest struct { Elasticsearch *Elasticsearch `config:"elasticsearch" json:"elasticsearch" yaml:"elasticsearch"` } +type PackageDirNameAndManifest struct { + DirName string + Path string + Manifest *PackageManifest +} + type ManifestIndexTemplate struct { IngestPipeline *ManifestIngestPipeline `config:"ingest_pipeline" json:"ingest_pipeline" yaml:"ingest_pipeline"` Mappings *ManifestMappings `config:"mappings" json:"mappings" yaml:"mappings"` @@ -422,6 +430,89 @@ func ReadPackageManifest(path string) (*PackageManifest, error) { return &m, nil } +// ReadAllPackageManifestsFromRepo reads all the package manifests in the given directory. +// It recursively searches for manifest.yml files up to the specified depth. 
+// - depth: maximum depth to search (1 = current dir + immediate sub dirs) +// - excludeDirs: comma-separated list of directory names to exclude (always excludes .git) +func ReadAllPackageManifestsFromRepo(searchRoot string, depth int, excludeDirs string) ([]PackageDirNameAndManifest, error) { + // Parse exclude directories + excludeMap := map[string]bool{ + ".git": true, // Always exclude .git + "build": true, // Always exclude build + } + + if excludeDirs != "" { + for dir := range strings.SplitSeq(excludeDirs, ",") { + excludeMap[strings.TrimSpace(dir)] = true + } + } + + var packages []PackageDirNameAndManifest + searchRootDepth := strings.Count(searchRoot, string(filepath.Separator)) + + err := filepath.WalkDir(searchRoot, func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + + // Calculate current depth relative to search root + currentDepth := strings.Count(path, string(filepath.Separator)) - searchRootDepth + + // If it's a directory, check if we should skip it + if d.IsDir() { + dirName := d.Name() + + // Skip excluded directories (but not the search root) + if excludeMap[dirName] && searchRoot != path { + return filepath.SkipDir + } + + // Skip if we've exceeded the depth limit (but allow processing the current level) + if currentDepth > depth { + return filepath.SkipDir + } + + return nil + } + + // Check if this is a manifest file + if d.Name() != PackageManifestFile { + return nil + } + + // Validate it's a package manifest + ok, err := isPackageManifest(path) + if err != nil { + logger.Debugf("failed to validate package manifest (path: %s): %v", path, err) + return nil + } + if !ok { + return nil + } + + // Extract directory name (just the package directory name, not the full path) + dirName := filepath.Base(filepath.Dir(path)) + manifest, err := ReadPackageManifest(path) + if err != nil { + return fmt.Errorf("failed to read package manifest (path: %s): %w", path, err) + } + + packages = append(packages, PackageDirNameAndManifest{ + DirName: dirName, + Manifest: manifest, + Path: filepath.Dir(path), + }) + + return nil + }) + + if err != nil { + return nil, fmt.Errorf("failed walking directory tree: %w", err) + } + + return packages, nil +} + // ReadTransformDefinitionFile reads and parses the transform definition (elasticsearch/transform//transform.yml) // file for the given transform. It also applies templating to the file, allowing to set the final ingest pipeline name // by adding the package version defined in the package manifest.