From 2d269e0d890d1b675d866fe6616df5d17e73cabd Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 6 Oct 2025 01:38:21 +0000
Subject: [PATCH] chore(deps): bump github.com/hashicorp/vault/sdk from 0.19.0
 to 0.20.0

Bumps [github.com/hashicorp/vault/sdk](https://github.com/hashicorp/vault) from 0.19.0 to 0.20.0.
- [Release notes](https://github.com/hashicorp/vault/releases)
- [Changelog](https://github.com/hashicorp/vault/blob/main/CHANGELOG-v1.10-v1.15.md)
- [Commits](https://github.com/hashicorp/vault/compare/sdk/v0.19.0...sdk/v0.20.0)

---
updated-dependencies:
- dependency-name: github.com/hashicorp/vault/sdk
  dependency-version: 0.20.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]
---
 go.mod                                        |   2 +-
 go.sum                                        |   4 +-
 .../vault/sdk/database/helper/connutil/sql.go |   4 +
 .../vault/sdk/helper/jsonutil/json.go         | 499 ++++++++++++++----
 .../sdk/logical/pki_cert_count_system_view.go |  33 ++
 .../vault/sdk/logical/system_view.go          |  11 +
 vendor/modules.txt                            |   2 +-
 7 files changed, 461 insertions(+), 94 deletions(-)
 create mode 100644 vendor/github.com/hashicorp/vault/sdk/logical/pki_cert_count_system_view.go

diff --git a/go.mod b/go.mod
index 497cd62..1d49aa5 100644
--- a/go.mod
+++ b/go.mod
@@ -5,7 +5,7 @@ go 1.25.0
 require (
 	github.com/hashicorp/go-uuid v1.0.3
 	github.com/hashicorp/vault/api v1.21.0
-	github.com/hashicorp/vault/sdk v0.19.0
+	github.com/hashicorp/vault/sdk v0.20.0
 	github.com/okta/okta-sdk-golang/v5 v5.0.6
 )
diff --git a/go.sum b/go.sum
index 6f02e39..8934375 100644
--- a/go.sum
+++ b/go.sum
@@ -217,8 +217,8 @@ github.com/hashicorp/hcl v1.0.1-vault-7 h1:ag5OxFVy3QYTFTJODRzTKVZ6xvdfLLCA1cy/Y
 github.com/hashicorp/hcl v1.0.1-vault-7/go.mod h1:XYhtn6ijBSAj6n4YqAaf7RBPS4I06AItNorpy+MoQNM=
 github.com/hashicorp/vault/api v1.21.0 h1:Xej4LJETV/spWRdjreb2vzQhEZt4+B5yxHAObfQVDOs=
 github.com/hashicorp/vault/api v1.21.0/go.mod h1:IUZA2cDvr4Ok3+NtK2Oq/r+lJeXkeCrHRmqdyWfpmGM=
-github.com/hashicorp/vault/sdk v0.19.0 h1:cpjxJ5qnEEX7xtVXaFpXpqfiFTs2hzyQiHS7oJRrvMM=
-github.com/hashicorp/vault/sdk v0.19.0/go.mod h1:IYPuA9rZdJjmvssaRWhqs6XQNH6g6XBLjIxOdOVYVrM=
+github.com/hashicorp/vault/sdk v0.20.0 h1:a4ulj2gICzw/qH0A4+6o36qAHxkUdcmgpMaSSjqE3dc=
+github.com/hashicorp/vault/sdk v0.20.0/go.mod h1:xEjAt/n/2lHBAkYiRPRmvf1d5B6HlisPh2pELlRCosk=
 github.com/hashicorp/yamux v0.1.2 h1:XtB8kyFOyHXYVFnwT5C3+Bdo8gArse7j2AQ0DA0Uey8=
 github.com/hashicorp/yamux v0.1.2/go.mod h1:C+zze2n6e/7wshOZep2A70/aQU6QBRWJO/G6FT1wIns=
 github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo=
diff --git a/vendor/github.com/hashicorp/vault/sdk/database/helper/connutil/sql.go b/vendor/github.com/hashicorp/vault/sdk/database/helper/connutil/sql.go
index 131b3b7..65fbd15 100644
--- a/vendor/github.com/hashicorp/vault/sdk/database/helper/connutil/sql.go
+++ b/vendor/github.com/hashicorp/vault/sdk/database/helper/connutil/sql.go
@@ -143,6 +143,7 @@ func (c *SQLConnectionProducer) Init(ctx context.Context, conf map[string]interf
 	var username string
 	var password string
+
 	if !c.SelfManaged {
 		// Default behavior
 		username = c.Username
@@ -154,6 +155,9 @@ func (c *SQLConnectionProducer) Init(ctx context.Context, conf map[string]interf
 		if !c.DisableEscaping {
 			username = url.PathEscape(c.Username)
 		}
+
+		// The exception for MySQL passwords specifically comes from https://github.com/hashicorp/vault/issues/7834
+		// Due, presumably, to uneveness in the way different sql engines handle the pseudo-URLs of DSNs.
 		if (c.Type != "mysql") && !c.DisableEscaping {
 			password = url.PathEscape(c.Password)
 		}
diff --git a/vendor/github.com/hashicorp/vault/sdk/helper/jsonutil/json.go b/vendor/github.com/hashicorp/vault/sdk/helper/jsonutil/json.go
index 76c84ae..8613d9f 100644
--- a/vendor/github.com/hashicorp/vault/sdk/helper/jsonutil/json.go
+++ b/vendor/github.com/hashicorp/vault/sdk/helper/jsonutil/json.go
@@ -10,6 +10,9 @@ import (
 	"encoding/json"
 	"fmt"
 	"io"
+	"strconv"
+	"strings"
+	"unicode"
 
 	"github.com/hashicorp/errwrap"
 	"github.com/hashicorp/vault/sdk/helper/compressutil"
@@ -105,8 +108,18 @@ func DecodeJSONFromReader(r io.Reader, out interface{}) error {
 
 // containerState holds information about an open JSON container (object or array).
 type containerState struct {
-	Type  json.Delim // '{' or '['
-	Count int        // Number of entries (for objects) or elements for arrays)
+	// '{' or '['
+	Type json.Delim
+
+	// Number of entries (for objects) or elements for arrays)
+	Count int
+
+	// isKey is true if the next expected token in an object is a key.
+	isKey bool
+
+	// keys tracks the keys seen in an object to detect duplicates.
+	// It is only initialized for objects ('{').
+	keys map[string]struct{}
 }
 
 // JSONLimits defines the configurable limits for JSON validation.
@@ -115,127 +128,259 @@ type JSONLimits struct {
 	MaxStringValueLength int
 	MaxObjectEntryCount  int
 	MaxArrayElementCount int
+	MaxTokens            int
 }
 
 // isWhitespace checks if a byte is a JSON whitespace character.
 func isWhitespace(b byte) bool {
-	return b == ' ' || b == '\t' || b == '\n' || b == '\r'
+	// Standard JSON whitespace characters (RFC 8259)
+	if b == ' ' || b == '\t' || b == '\n' || b == '\r' {
+		return true
+	}
+	// Custom support for non-standard Unit Separator character (ASCII 31)
+	if b == 31 || b == 139 {
+		return true
+	}
+	return false
 }
 
-// VerifyMaxDepthStreaming scans the JSON stream to determine its maximum nesting depth
-// and enforce various limits. It first checks if the stream is likely JSON before proceeding.
-func VerifyMaxDepthStreaming(jsonReader io.Reader, limits JSONLimits) (int, error) {
-	// Use a buffered reader to peek at the stream without consuming it from the original reader.
-	bufReader := bufio.NewReader(jsonReader)
+// defaultBufferSize is the default size for the bufio.Reader buffer
+const defaultBufferSize = int64(4096)
 
-	// Find the first non-whitespace character.
-	var firstByte byte
-	var err error
-	for {
-		firstByte, err = bufReader.ReadByte()
-		if err != nil {
-			// If we hit EOF before finding a real character, it's an empty or whitespace-only payload.
-			if err == io.EOF {
-				return 0, nil
-			}
-			return 0, err // A different I/O error occurred.
-		}
-		if !isWhitespace(firstByte) {
-			break // Found the first significant character.
-		}
+// VerifyMaxDepthStreaming scans the JSON stream to enforce nesting depth, counts,
+// and other limits without decoding the full structure into memory.
+func VerifyMaxDepthStreaming(jsonReader io.Reader, limits JSONLimits, maxRequestSize *int64) (int, error) {
+	// If the default buffer size is larger than the max request size, use the max request size
+	// for the buffer to avoid over-reading.
+	bufferSize := defaultBufferSize
+	if maxRequestSize != nil && *maxRequestSize < defaultBufferSize {
+		bufferSize = *maxRequestSize
 	}
+	// Use a buffered reader to peek at the stream
+	bufReader := bufio.NewReaderSize(jsonReader, int(bufferSize))
 
-	// If the payload doesn't start with '{' or '[', assume it's not a JSON object or array
-	// and that our limits do not apply.
-	if firstByte != '{' && firstByte != '[' {
-		return 0, nil
+	bom, err := bufReader.Peek(3)
+	if err == nil && bytes.Equal(bom, []byte{0xEF, 0xBB, 0xBF}) {
+		_, _ = bufReader.Discard(3)
 	}
 
-	fullStreamReader := io.MultiReader(bytes.NewReader([]byte{firstByte}), bufReader)
-	decoder := json.NewDecoder(fullStreamReader)
-	decoder.UseNumber()
-
+	// We use a manual token loop instead of json.Decoder to gain low-level
+	// control over the stream. This is necessary to fix a vulnerability where
+	// the raw byte length of strings with escape sequences was not correctly limited.
 	var (
-		maxDepth      = 0
-		currentDepth  = 0
-		isKeyExpected bool
+		maxDepth          int
+		currentDepth      int
+		tokenCount        int
+		lastTokenWasComma bool
 	)
 
 	containerInfoStack := make([]containerState, 0, limits.MaxDepth)
 
-	for {
-		t, err := decoder.Token()
+	// Prime the loop by finding the first non-whitespace character.
+	if err := skipWhitespace(bufReader); err != nil {
 		if err == io.EOF {
-			break
+			// An empty payload or one with only whitespace is valid. Skip verification.
+			return 0, nil
 		}
+		return 0, err
+	}
+
+	b, err := bufReader.Peek(1)
+	if err != nil {
+		// This can happen if there's an I/O error after skipping whitespace.
+		return 0, err
+	}
+
+	// If the payload doesn't start with a JSON container ('{' or '['), skip
+	// verification. The limits are intended for structured data, not primitives
+	// or other formats.
+	if b[0] != '{' && b[0] != '[' {
+		return 0, nil
+	}
+
+	for {
+		// Check for EOF before peeking.
+		b, err := bufReader.Peek(1)
+		// Any error from the decoder is now considered a real error.
 		if err != nil {
-			// Any error from the decoder is now considered a real error.
+			if err == io.EOF {
+				break
+			}
 			return 0, fmt.Errorf("error reading JSON token: %w", err)
 		}
 
-		switch v := t.(type) {
-		case json.Delim:
-			switch v {
-			case '{', '[':
-				currentDepth++
-				// Check against the limit directly.
-				if currentDepth > limits.MaxDepth {
-					return 0, fmt.Errorf("JSON input exceeds allowed nesting depth")
+		// If the last token was a comma, the next token cannot be a closing delimiter.
+		if lastTokenWasComma && (b[0] == '}' || b[0] == ']') {
+			if b[0] == '}' {
+				return 0, fmt.Errorf("invalid character '}' after object key-value pair")
+			}
+			return 0, fmt.Errorf("invalid character ']' after array element")
+		}
+
+		// After a top-level value, any other character is an error.
+		if len(containerInfoStack) == 0 && maxDepth > 0 {
+			return 0, fmt.Errorf("invalid character '%c' after top-level value", b[0])
+		}
+
+		// Increment and check the total token count limit.
+		if limits.MaxTokens > 0 {
+			tokenCount++
+			if tokenCount > limits.MaxTokens {
+				return 0, fmt.Errorf("JSON payload exceeds allowed token count")
+			}
+		}
+
+		var currentContainer *containerState
+		if len(containerInfoStack) > 0 {
+			currentContainer = &containerInfoStack[len(containerInfoStack)-1]
+		}
+
+		// Before processing the token, reset the comma flag. It will be set
+		// again below if the current token is a comma.
+		lastTokenWasComma = false
+
+		switch b[0] {
+		case '{', '[':
+			delim, _ := bufReader.ReadByte()
+			if currentContainer != nil {
+				if currentContainer.Type == '[' {
+					currentContainer.Count++
+					if currentContainer.Count > limits.MaxArrayElementCount {
+						return 0, fmt.Errorf("JSON array exceeds allowed element count")
+					}
+				} else {
+					currentContainer.isKey = true
 				}
-				if currentDepth > maxDepth {
-					maxDepth = currentDepth
+			}
+
+			// Handle depth checks and tracking together for clarity.
+			currentDepth++
+			if currentDepth > maxDepth {
+				maxDepth = currentDepth
+			}
+			// Check depth limit immediately after incrementing.
+			if limits.MaxDepth > 0 && currentDepth > limits.MaxDepth {
+				return 0, fmt.Errorf("JSON input exceeds allowed nesting depth")
+			}
+
+			// For objects, initialize a map to track keys and prevent duplicates.
+			var keys map[string]struct{}
+			if delim == '{' {
+				keys = make(map[string]struct{})
+			}
+
+			containerInfoStack = append(containerInfoStack, containerState{Type: json.Delim(delim), isKey: delim == '{', keys: keys})
+
+		case '}', ']':
+			// A closing brace cannot follow a colon without a value.
+			if currentContainer != nil && currentContainer.Type == '{' && !currentContainer.isKey {
+				return 0, fmt.Errorf("invalid character '}' after object key")
+			}
+			delim, _ := bufReader.ReadByte()
+			if currentContainer == nil {
+				return 0, fmt.Errorf("malformed JSON: unmatched closing delimiter '%c'", delim)
+			}
+			if (delim == '}' && currentContainer.Type != '{') || (delim == ']' && currentContainer.Type != '[') {
+				return 0, fmt.Errorf("malformed JSON: mismatched closing delimiter '%c'", delim)
+			}
+			containerInfoStack = containerInfoStack[:len(containerInfoStack)-1]
+			currentDepth--
+			if len(containerInfoStack) > 0 && containerInfoStack[len(containerInfoStack)-1].Type == '{' {
+				containerInfoStack[len(containerInfoStack)-1].isKey = true
+			}
+
+		case '"':
+			// Manually scan the string to count its raw byte length and get the value.
+			val, err := scanString(bufReader, limits.MaxStringValueLength)
+			if err != nil {
+				return 0, err
+			}
+
+			if currentContainer == nil {
+				if maxDepth == 0 {
+					maxDepth = 1
 				}
+				break
+			}
 
-				containerInfoStack = append(containerInfoStack, containerState{Type: v, Count: 0})
-				if v == '{' {
-					isKeyExpected = true
+			if currentContainer.Type == '{' {
+				if currentContainer.isKey {
+					// Check for duplicate keys.
+					if _, ok := currentContainer.keys[val]; ok {
+						return 0, fmt.Errorf("duplicate key '%s' in object", val)
+					}
+					currentContainer.keys[val] = struct{}{}
+
+					currentContainer.Count++
+					if currentContainer.Count > limits.MaxObjectEntryCount {
+						return 0, fmt.Errorf("JSON object exceeds allowed entry count")
+					}
+					currentContainer.isKey = false
+				} else {
+					currentContainer.isKey = true
 				}
-			case '}', ']':
-				if len(containerInfoStack) == 0 {
-					return 0, fmt.Errorf("malformed JSON: unmatched closing delimiter '%c'", v)
+			} else {
+				currentContainer.Count++
+				if currentContainer.Count > limits.MaxArrayElementCount {
+					return 0, fmt.Errorf("JSON array exceeds allowed element count")
 				}
-				top := containerInfoStack[len(containerInfoStack)-1]
-				containerInfoStack = containerInfoStack[:len(containerInfoStack)-1]
-				currentDepth--
-				if (v == '}' && top.Type != '{') || (v == ']' && top.Type != '[') {
-					return 0, fmt.Errorf("malformed JSON: mismatched closing delimiter '%c' for opening '%c'", v, top.Type)
+			}
+
+		case 't', 'f', 'n': // true, false, null
+			if err := scanLiteral(bufReader); err != nil {
+				return 0, err
+			}
+			if currentContainer == nil {
+				if maxDepth == 0 {
+					maxDepth = 1
 				}
-				if len(containerInfoStack) > 0 && containerInfoStack[len(containerInfoStack)-1].Type == '{' {
-					isKeyExpected = false
+				break
+			}
+			if currentContainer.Type == '[' {
+				currentContainer.Count++
+				if currentContainer.Count > limits.MaxArrayElementCount {
+					return 0, fmt.Errorf("JSON array exceeds allowed element count")
 				}
+			} else {
+				currentContainer.isKey = true
 			}
-		case string:
-			if len(v) > limits.MaxStringValueLength {
-				return 0, fmt.Errorf("JSON string value exceeds allowed length")
-			}
-			if len(containerInfoStack) > 0 {
-				top := &containerInfoStack[len(containerInfoStack)-1]
-				if top.Type == '{' {
-					if isKeyExpected {
-						top.Count++
-						if top.Count > limits.MaxObjectEntryCount {
-							return 0, fmt.Errorf("JSON object exceeds allowed entry count")
-						}
-						isKeyExpected = false
-					}
-				} else if top.Type == '[' {
-					top.Count++
-					if top.Count > limits.MaxArrayElementCount {
-						return 0, fmt.Errorf("JSON array exceeds allowed element count")
+
+		case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+			if err := scanNumber(bufReader, limits.MaxStringValueLength); err != nil {
+				return 0, err
+			}
+			if currentContainer == nil {
+				if maxDepth == 0 {
+					maxDepth = 1
 				}
+				break
 			}
-		default: // Handles numbers, booleans, and nulls
-			if len(containerInfoStack) > 0 {
-				top := &containerInfoStack[len(containerInfoStack)-1]
-				if top.Type == '[' {
-					top.Count++
-					if top.Count > limits.MaxArrayElementCount {
-						return 0, fmt.Errorf("JSON array exceeds allowed element count")
-					}
-				} else if top.Type == '{' {
-					isKeyExpected = true
+			if currentContainer.Type == '[' {
+				currentContainer.Count++
+				if currentContainer.Count > limits.MaxArrayElementCount {
+					return 0, fmt.Errorf("JSON array exceeds allowed element count")
 				}
+			} else {
+				currentContainer.isKey = true
+			}
+
+		case ',':
+			_, _ = bufReader.ReadByte()
+			lastTokenWasComma = true
+			if currentContainer != nil && currentContainer.Type == '{' {
+				currentContainer.isKey = true
+			}
+
+		case ':':
+			_, _ = bufReader.ReadByte()
+
+		default:
+			return 0, fmt.Errorf("invalid character '%c' looking for beginning of value", b[0])
+		}
+
+		if err := skipWhitespace(bufReader); err != nil {
+			if err == io.EOF {
+				break
 			}
+			return 0, err
 		}
 	}
@@ -245,3 +390,177 @@ func VerifyMaxDepthStreaming(jsonReader io.Reader, limits JSONLimits) (int, erro
 	return maxDepth, nil
 }
+
+func skipWhitespace(r *bufio.Reader) error {
+	for {
+		b, err := r.Peek(1)
+		if err != nil {
+			return err
+		}
+		if !isWhitespace(b[0]) {
+			return nil
+		}
+		_, _ = r.ReadByte()
+	}
+}
+
+// scanString consumes a JSON string from the reader, ensuring the raw byte
+// length of its content does not exceed the limit. It returns the unescaped
+// string value.
+func scanString(r *bufio.Reader, limit int) (string, error) {
+	if b, _ := r.ReadByte(); b != '"' {
+		return "", fmt.Errorf("expected string")
+	}
+
+	var builder strings.Builder
+	contentByteCount := 0
+	var lastRune rune = -1 // Track the last rune for surrogate pair validation.
+
+	for {
+		b, err := r.ReadByte()
+		if err != nil {
+			if err == io.EOF {
+				return "", fmt.Errorf("malformed JSON, unclosed string")
+			}
+			return "", err
+		}
+
+		if b == '"' {
+			// Before successfully returning, ensure we didn't end on an unpaired high surrogate.
+			if lastRune >= 0xD800 && lastRune <= 0xDBFF {
+				return "", fmt.Errorf("malformed JSON, unterminated surrogate pair in string")
+			}
+			return builder.String(), nil
+		}
+
+		contentByteCount++
+		lastRune = -1 // Reset unless we parse a new rune.
+
+		if b == '\\' {
+			escaped, err := r.ReadByte()
+			if err != nil {
+				return "", fmt.Errorf("malformed JSON, unterminated escape sequence in string")
+			}
+			contentByteCount++
+
+			switch escaped {
+			case '"', '\\', '/':
+				builder.WriteByte(escaped)
+			case 'b':
+				builder.WriteByte('\b')
+			case 'f':
+				builder.WriteByte('\f')
+			case 'n':
+				builder.WriteByte('\n')
+			case 'r':
+				builder.WriteByte('\r')
+			case 't':
+				builder.WriteByte('\t')
+			case 'u':
+				hexChars := make([]byte, 4)
+				if _, err := io.ReadFull(r, hexChars); err != nil {
+					return "", fmt.Errorf("malformed JSON, unterminated unicode escape in string")
+				}
+				contentByteCount += 4
+
+				hexStr := string(hexChars)
+				for _, char := range hexStr {
+					if !((char >= '0' && char <= '9') || (char >= 'a' && char <= 'f') || (char >= 'A' && char <= 'F')) {
+						return "", fmt.Errorf("invalid character '%c' in string escape code", char)
+					}
+				}
+
+				code, _ := strconv.ParseUint(hexStr, 16, 32)
+				if code > unicode.MaxRune {
+					return "", fmt.Errorf("invalid unicode escape sequence: value out of range")
+				}
+
+				r := rune(code)
+				builder.WriteRune(r)
+				lastRune = r
+			default:
+				return "", fmt.Errorf("invalid character '%c' in string escape code", escaped)
+			}
+		} else {
+			builder.WriteByte(b)
+		}
+
+		if limit > 0 && contentByteCount > limit {
+			return "", fmt.Errorf("JSON string value exceeds allowed length")
+		}
+	}
+}
+
+func scanLiteral(r *bufio.Reader) error {
+	for {
+		b, err := r.Peek(1)
+		if err != nil {
+			// If we hit EOF after reading part of a literal, that's a clean end.
+			if err == io.EOF {
+				break
+			}
+			return err
+		}
+		if isWhitespace(b[0]) || b[0] == ',' || b[0] == '}' || b[0] == ']' {
+			return nil
+		}
+		_, _ = r.ReadByte()
+	}
+	return nil
+}
+
+func scanNumber(r *bufio.Reader, limit int) error {
+	var builder strings.Builder
+	byteCount := 0
+
+	// Peek at the first char to check for leading zero issues.
+	peeked, err := r.Peek(2)
+	if err == nil && len(peeked) > 1 {
+		if peeked[0] == '0' && peeked[1] >= '0' && peeked[1] <= '9' {
+			_, _ = r.ReadByte()
+			return fmt.Errorf("invalid character '%c' after top-level value", peeked[1])
+		}
+	}
+
+	for {
+		b, err := r.Peek(1)
+		if err != nil {
+			if err == io.EOF {
+				break
+			}
+			return err
+		}
+
+		char := b[0]
+		if isWhitespace(char) || char == ',' || char == '}' || char == ']' {
+			break
+		}
+
+		// A number token can contain only these characters.
+		isNumPart := (char >= '0' && char <= '9') || char == '.' || char == 'e' || char == 'E' || char == '+' || char == '-'
+		if !isNumPart {
+			// If it's not a valid number character, we stop scanning.
+			break
+		}
+
+		_, _ = r.ReadByte()
+		builder.WriteByte(char)
+		byteCount++
+		if limit > 0 && byteCount > limit {
+			return fmt.Errorf("JSON number value exceeds allowed length")
+		}
+	}
+
+	if byteCount == 0 {
+		return fmt.Errorf("malformed JSON, empty number")
+	}
+
+	// Use the standard library for a final, strict validation of the number's syntax.
+	// This correctly rejects malformed inputs like "-" or "123.".
+	numStr := builder.String()
+	if _, err := strconv.ParseFloat(numStr, 64); err != nil {
+		return fmt.Errorf("malformed JSON, invalid number syntax for '%s'", numStr)
+	}
+
+	return nil
+}
diff --git a/vendor/github.com/hashicorp/vault/sdk/logical/pki_cert_count_system_view.go b/vendor/github.com/hashicorp/vault/sdk/logical/pki_cert_count_system_view.go
new file mode 100644
index 0000000..3434b1f
--- /dev/null
+++ b/vendor/github.com/hashicorp/vault/sdk/logical/pki_cert_count_system_view.go
@@ -0,0 +1,33 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package logical
+
+// PkiCertificateCounter is an interface for incrementing the count of issued and stored
+// PKI certificates.
+type PkiCertificateCounter interface {
+	// IncrementCount increments the count of issued and stored certificates.
+	IncrementCount(issuedCerts, storedCerts uint64)
+
+	// AddIssuedCertificate increments the issued certificate count by 1, and also the
+	// stored certificate count if stored is true.
+	AddIssuedCertificate(stored bool)
+}
+
+type PkiCertificateCountSystemView interface {
+	GetPkiCertificateCounter() PkiCertificateCounter
+}
+
+type nullPkiCertificateCounter struct{}
+
+func (n *nullPkiCertificateCounter) IncrementCount(_, _ uint64) {
+}
+
+func (n *nullPkiCertificateCounter) AddIssuedCertificate(_ bool) {
+}
+
+var _ PkiCertificateCounter = (*nullPkiCertificateCounter)(nil)
+
+func NewNullPkiCertificateCounter() PkiCertificateCounter {
+	return &nullPkiCertificateCounter{}
+}
diff --git a/vendor/github.com/hashicorp/vault/sdk/logical/system_view.go b/vendor/github.com/hashicorp/vault/sdk/logical/system_view.go
index f0aa22e..6aafcd1 100644
--- a/vendor/github.com/hashicorp/vault/sdk/logical/system_view.go
+++ b/vendor/github.com/hashicorp/vault/sdk/logical/system_view.go
@@ -319,3 +319,14 @@ func (d StaticSystemView) DeregisterRotationJob(_ context.Context, _ *rotation.R
 func (d StaticSystemView) DownloadExtractVerifyPlugin(_ context.Context, _ *pluginutil.PluginRunner) error {
 	return errors.New("DownloadExtractVerifyPlugin is not implemented in StaticSystemView")
 }
+
+// PluginLicenseUtil defines the functions needed to request License and PluginEnv
+// by the plugin licensing under github.com/hashicorp/vault-licensing
+// This only should be used by the plugin to get the license and plugin environment
+type PluginLicenseUtil interface {
+	// License returns the raw license of the running Vault instance
+	License() (string, error)
+
+	// PluginEnv returns Vault environment information used by plugins
+	PluginEnv(context.Context) (*PluginEnvironment, error)
+}
diff --git a/vendor/modules.txt b/vendor/modules.txt
index de53d08..fac0a7c 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -284,7 +284,7 @@ github.com/hashicorp/hcl/json/token
 # github.com/hashicorp/vault/api v1.21.0
 ## explicit; go 1.23.0
 github.com/hashicorp/vault/api
-# github.com/hashicorp/vault/sdk v0.19.0
+# github.com/hashicorp/vault/sdk v0.20.0
 ## explicit; go 1.25.0
 github.com/hashicorp/vault/sdk/database/dbplugin
 github.com/hashicorp/vault/sdk/database/helper/cacheutil
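
Since this bump changes the exported signature of jsonutil.VerifyMaxDepthStreaming (the json.go hunk above adds a third maxRequestSize *int64 parameter in sdk v0.20.0), a minimal sketch of how a consumer of this module might adapt its call site follows. The limit values and the 32 KiB request size below are illustrative assumptions only, not values taken from this repository.

package main

import (
	"fmt"
	"strings"

	"github.com/hashicorp/vault/sdk/helper/jsonutil"
)

func main() {
	payload := strings.NewReader(`{"name": "example", "tags": ["a", "b"]}`)

	// Illustrative limits; real callers should pick values that match their
	// own request-size policy.
	limits := jsonutil.JSONLimits{
		MaxDepth:             100,
		MaxStringValueLength: 1024,
		MaxObjectEntryCount:  100,
		MaxArrayElementCount: 100,
		MaxTokens:            10000,
	}

	// New in sdk v0.20.0: a *int64 request-size hint. Passing nil keeps the
	// default 4096-byte read buffer; a smaller value shrinks the buffer.
	maxRequestSize := int64(32 * 1024)
	depth, err := jsonutil.VerifyMaxDepthStreaming(payload, limits, &maxRequestSize)
	if err != nil {
		fmt.Println("payload rejected:", err)
		return
	}
	fmt.Println("maximum nesting depth:", depth)
}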