chore: bump Go dependencies

This commit is contained in:
lzap 2025-08-03 15:24:15 +00:00 committed by Simon de Vlieger
parent b3d1e4cf13
commit e118df5dfd
1119 changed files with 126580 additions and 8706 deletions

201
vendor/github.com/speakeasy-api/jsonpath/LICENSE generated vendored Normal file
View file

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@ -0,0 +1,31 @@
package config
// Option mutates the private config value during construction via New.
type Option func(*config)

// WithPropertyNameExtension enables the use of the "~" character to access a property key.
// It is not enabled by default as this is outside of RFC 9535, but is important for several use-cases
func WithPropertyNameExtension() Option {
	return func(cfg *config) {
		cfg.propertyNameExtension = true
	}
}

// Config is the read-only view of the parser configuration.
type Config interface {
	// PropertyNameEnabled reports whether the "~" property-name extension is active.
	PropertyNameEnabled() bool
}

// config is the concrete, option-populated implementation of Config.
type config struct {
	propertyNameExtension bool
}

// PropertyNameEnabled reports whether the "~" extension was requested.
func (c *config) PropertyNameEnabled() bool {
	return c.propertyNameExtension
}

// New builds a Config by applying every supplied Option to a zero config.
func New(opts ...Option) Config {
	result := &config{}
	for _, apply := range opts {
		apply(result)
	}
	return result
}

View file

@ -0,0 +1,456 @@
package jsonpath
import (
"gopkg.in/yaml.v3"
"strconv"
"strings"
)
// filter-selector = "?" S logical-expr
type filterSelector struct {
	// logical-expr = logical-or-expr
	expression *logicalOrExpr
}

// ToString renders the filter's logical expression (the leading "?" is
// emitted by the enclosing selector, not here).
func (s filterSelector) ToString() string {
	return s.expression.ToString()
}
// logical-or-expr = logical-and-expr *(S "||" S logical-and-expr)
type logicalOrExpr struct {
	expressions []*logicalAndExpr
}

// ToString renders the disjunction with " || " between operands.
func (e logicalOrExpr) ToString() string {
	parts := make([]string, len(e.expressions))
	for i, expr := range e.expressions {
		parts[i] = expr.ToString()
	}
	return strings.Join(parts, " || ")
}

// logical-and-expr = basic-expr *(S "&&" S basic-expr)
type logicalAndExpr struct {
	expressions []*basicExpr
}

// ToString renders the conjunction with " && " between operands.
func (e logicalAndExpr) ToString() string {
	parts := make([]string, len(e.expressions))
	for i, expr := range e.expressions {
		parts[i] = expr.ToString()
	}
	return strings.Join(parts, " && ")
}
// relQuery rel-query = current-node-identifier segments
// current-node-identifier = "@"
type relQuery struct {
	segments []*segment
}

// ToString renders the query as "@" followed by each segment in order.
func (q relQuery) ToString() string {
	var sb strings.Builder
	sb.WriteString("@")
	for _, seg := range q.segments {
		sb.WriteString(seg.ToString())
	}
	return sb.String()
}

// filterQuery filter-query = rel-query / jsonpath-query
type filterQuery struct {
	relQuery      *relQuery
	jsonPathQuery *jsonPathAST
}

// ToString renders whichever alternative is populated, or "" when neither is.
func (q filterQuery) ToString() string {
	switch {
	case q.relQuery != nil:
		return q.relQuery.ToString()
	case q.jsonPathQuery != nil:
		return q.jsonPathQuery.ToString()
	default:
		return ""
	}
}
// functionArgument function-argument = literal /
//
//	filter-query / ; (includes singular-query)
//	logical-expr /
//	function-expr
//
// Exactly one of the pointer fields is expected to be non-nil.
type functionArgument struct {
	literal      *literal
	filterQuery  *filterQuery
	logicalExpr  *logicalOrExpr
	functionExpr *functionExpr
}

// functionArgType discriminates how a resolvedArgument should be consumed.
type functionArgType int

const (
	// functionArgTypeLiteral carries a single literal value.
	functionArgTypeLiteral functionArgType = iota
	// functionArgTypeNodes carries zero or many literals produced by a node query.
	functionArgTypeNodes
)

// resolvedArgument is the evaluated form of a functionArgument.
type resolvedArgument struct {
	kind    functionArgType
	literal *literal   // populated when kind == functionArgTypeLiteral
	nodes   []*literal // populated when kind == functionArgTypeNodes
}

// Eval resolves the argument against the current node and the root document.
// A filter query yielding exactly one node collapses to a literal; any other
// result count is reported as a node list. Logical and nested function
// sub-expressions always collapse to a single literal.
func (a functionArgument) Eval(idx index, node *yaml.Node, root *yaml.Node) resolvedArgument {
	if a.literal != nil {
		return resolvedArgument{kind: functionArgTypeLiteral, literal: a.literal}
	} else if a.filterQuery != nil {
		result := a.filterQuery.Query(idx, node, root)
		lits := make([]*literal, len(result))
		for i, node := range result {
			lit := nodeToLiteral(node)
			lits[i] = &lit
		}
		if len(result) != 1 {
			return resolvedArgument{kind: functionArgTypeNodes, nodes: lits}
		} else {
			return resolvedArgument{kind: functionArgTypeLiteral, literal: lits[0]}
		}
	} else if a.logicalExpr != nil {
		res := a.logicalExpr.Matches(idx, node, root)
		return resolvedArgument{kind: functionArgTypeLiteral, literal: &literal{bool: &res}}
	} else if a.functionExpr != nil {
		res := a.functionExpr.Evaluate(idx, node, root)
		return resolvedArgument{kind: functionArgTypeLiteral, literal: &res}
	}
	// No alternative populated: the zero resolvedArgument is returned.
	return resolvedArgument{}
}

// ToString renders whichever alternative is populated, or "" when none is.
func (a functionArgument) ToString() string {
	builder := strings.Builder{}
	if a.literal != nil {
		builder.WriteString(a.literal.ToString())
	} else if a.filterQuery != nil {
		builder.WriteString(a.filterQuery.ToString())
	} else if a.logicalExpr != nil {
		builder.WriteString(a.logicalExpr.ToString())
	} else if a.functionExpr != nil {
		builder.WriteString(a.functionExpr.ToString())
	}
	return builder.String()
}
// function-name = function-name-first *function-name-char
// function-name-first = LCALPHA
// function-name-char = function-name-first / "_" / DIGIT
// LCALPHA = %x61-7A ; "a".."z"
type functionType int

const (
	functionTypeLength functionType = iota
	functionTypeCount
	functionTypeMatch
	functionTypeSearch
	functionTypeValue
)

// functionTypeMap maps each built-in function name to its enum value.
var functionTypeMap = map[string]functionType{
	"length": functionTypeLength,
	"count":  functionTypeCount,
	"match":  functionTypeMatch,
	"search": functionTypeSearch,
	"value":  functionTypeValue,
}

// String returns the function's name, or "unknown" for an unmapped value.
func (f functionType) String() string {
	for name, typ := range functionTypeMap {
		if typ == f {
			return name
		}
	}
	return "unknown"
}
// functionExpr function-expr = function-name "(" S [function-argument
// *(S "," S function-argument)] S ")"
type functionExpr struct {
	funcType functionType
	args     []*functionArgument
}

// ToString renders the call as "name(arg1, arg2, ...)".
func (e functionExpr) ToString() string {
	var sb strings.Builder
	sb.WriteString(e.funcType.String())
	sb.WriteString("(")
	for i, arg := range e.args {
		if i > 0 {
			// Arguments after the first are comma-separated.
			sb.WriteString(", ")
		}
		sb.WriteString(arg.ToString())
	}
	sb.WriteString(")")
	return sb.String()
}
// testExpr test-expr = [logical-not-op S]
//
//	(filter-query / ; existence/non-existence
//	 function-expr) ; LogicalType or NodesType
type testExpr struct {
	not          bool
	filterQuery  *filterQuery
	functionExpr *functionExpr
}

// ToString renders an optional "!" followed by whichever alternative is set.
func (e testExpr) ToString() string {
	prefix := ""
	if e.not {
		prefix = "!"
	}
	switch {
	case e.filterQuery != nil:
		return prefix + e.filterQuery.ToString()
	case e.functionExpr != nil:
		return prefix + e.functionExpr.ToString()
	default:
		return prefix
	}
}
// basicExpr basic-expr =
//
//	paren-expr /
//	comparison-expr /
//	test-expr
type basicExpr struct {
	parenExpr      *parenExpr
	comparisonExpr *comparisonExpr
	testExpr       *testExpr
}

// ToString renders whichever alternative is populated, or "" when none is.
func (e basicExpr) ToString() string {
	switch {
	case e.parenExpr != nil:
		return e.parenExpr.ToString()
	case e.comparisonExpr != nil:
		return e.comparisonExpr.ToString()
	case e.testExpr != nil:
		return e.testExpr.ToString()
	default:
		return ""
	}
}
// literal literal = number /
// . string-literal /
// . true / false / null
type literal struct {
	// we generally decompose these into their component parts for easier evaluation
	integer *int
	float64 *float64
	string  *string
	bool    *bool
	null    *bool // presence (non-nil) alone denotes null; the pointee is unused
	node    *yaml.Node
}

// ToString renders the literal in JSONPath source form: numbers verbatim,
// strings single-quoted and escaped, booleans/null as keywords, and yaml
// nodes as bracketed ("[...]"/"{...}") recursive renderings.
func (l literal) ToString() string {
	if l.integer != nil {
		return strconv.Itoa(*l.integer)
	} else if l.float64 != nil {
		return strconv.FormatFloat(*l.float64, 'f', -1, 64)
	} else if l.string != nil {
		builder := strings.Builder{}
		builder.WriteString("'")
		builder.WriteString(escapeString(*l.string))
		builder.WriteString("'")
		return builder.String()
	} else if l.bool != nil {
		if *l.bool {
			return "true"
		}
		return "false"
	} else if l.null != nil {
		// Fix: the previous code branched on *l.null but returned "null" in
		// both arms; only the pointer's presence matters.
		return "null"
	} else if l.node != nil {
		switch l.node.Kind {
		case yaml.ScalarNode:
			return l.node.Value
		case yaml.SequenceNode:
			builder := strings.Builder{}
			builder.WriteString("[")
			for i, child := range l.node.Content {
				if i > 0 {
					builder.WriteString(",")
				}
				builder.WriteString(literal{node: child}.ToString())
			}
			builder.WriteString("]")
			return builder.String()
		case yaml.MappingNode:
			builder := strings.Builder{}
			builder.WriteString("{")
			for i, child := range l.node.Content {
				if i > 0 {
					builder.WriteString(",")
				}
				builder.WriteString(literal{node: child}.ToString())
			}
			builder.WriteString("}")
			return builder.String()
		}
	}
	// No field populated (or an unhandled node kind): render as empty.
	return ""
}
// escapeString escapes a raw string value for embedding in a single-quoted
// JSONPath string literal: newlines, backslashes and single quotes are
// backslash-escaped; every other byte passes through unchanged.
func escapeString(value string) string {
	b := strings.Builder{}
	for i := 0; i < len(value); i++ {
		switch value[i] {
		case '\n':
			// A raw newline serializes as the two-character escape `\n`.
			// (The previous code wrote `\\n` — a double-escaped backslash
			// followed by a literal 'n' — which does not round-trip: it
			// re-parses as a backslash plus 'n', not a newline.)
			b.WriteString("\\n")
		case '\\':
			b.WriteString("\\\\")
		case '\'':
			b.WriteString("\\'")
		default:
			b.WriteByte(value[i])
		}
	}
	return b.String()
}
// absQuery is a root-anchored query; it reuses jsonPathAST's segment list
// but renders with the "$" root identifier.
type absQuery jsonPathAST

// ToString renders the query as "$" followed by each segment in order.
func (q absQuery) ToString() string {
	var sb strings.Builder
	sb.WriteString("$")
	for _, seg := range q.segments {
		sb.WriteString(seg.ToString())
	}
	return sb.String()
}

// singularQuery singular-query = rel-singular-query / abs-singular-query
type singularQuery struct {
	relQuery *relQuery
	absQuery *absQuery
}

// ToString renders whichever alternative is populated, or "" when neither is.
func (q singularQuery) ToString() string {
	switch {
	case q.relQuery != nil:
		return q.relQuery.ToString()
	case q.absQuery != nil:
		return q.absQuery.ToString()
	default:
		return ""
	}
}
// comparable
//
//	comparable = literal /
//	             singular-query / ; singular query value
//	             function-expr ; ValueType
type comparable struct {
	literal       *literal
	singularQuery *singularQuery
	functionExpr  *functionExpr
}

// ToString renders whichever alternative is populated, or "" when none is.
func (c comparable) ToString() string {
	switch {
	case c.literal != nil:
		return c.literal.ToString()
	case c.singularQuery != nil:
		return c.singularQuery.ToString()
	case c.functionExpr != nil:
		return c.functionExpr.ToString()
	default:
		return ""
	}
}
// comparisonExpr represents a comparison expression
//
//	comparison-expr = comparable S comparison-op S comparable
//	literal         = number / string-literal /
//	                  true / false / null
//	comparable      = literal /
//	                  singular-query / ; singular query value
//	                  function-expr ; ValueType
//	comparison-op   = "==" / "!=" /
//	                  "<=" / ">=" /
//	                  "<" / ">"
type comparisonExpr struct {
	left  *comparable
	op    comparisonOperator
	right *comparable
}

// ToString renders the comparison as "<left> <op> <right>".
func (e comparisonExpr) ToString() string {
	return e.left.ToString() + " " + e.op.ToString() + " " + e.right.ToString()
}
// existExpr represents an existence expression
type existExpr struct {
	query string
}

// parenExpr represents a parenthesized expression
//
//	paren-expr = [logical-not-op S] "(" S logical-expr S ")"
type parenExpr struct {
	// "!"
	not bool
	// "(" logicalOrExpr ")"
	expr *logicalOrExpr
}

// ToString renders the inner expression in parentheses, with a leading "!"
// when the expression is negated.
func (e parenExpr) ToString() string {
	rendered := "(" + e.expr.ToString() + ")"
	if e.not {
		return "!" + rendered
	}
	return rendered
}
// comparisonOperator represents a comparison operator
type comparisonOperator int

const (
	equalTo comparisonOperator = iota
	notEqualTo
	lessThan
	lessThanEqualTo
	greaterThan
	greaterThanEqualTo
)

// comparisonOperatorNames indexes each operator's source form by its enum value.
var comparisonOperatorNames = [...]string{
	equalTo:            "==",
	notEqualTo:         "!=",
	lessThan:           "<",
	lessThanEqualTo:    "<=",
	greaterThan:        ">",
	greaterThanEqualTo: ">=",
}

// ToString returns the operator's textual form, or "" for an out-of-range value.
func (o comparisonOperator) ToString() string {
	if o < 0 || int(o) >= len(comparisonOperatorNames) {
		return ""
	}
	return comparisonOperatorNames[o]
}

View file

@ -0,0 +1,35 @@
package jsonpath
import (
"fmt"
"github.com/speakeasy-api/jsonpath/pkg/jsonpath/config"
"github.com/speakeasy-api/jsonpath/pkg/jsonpath/token"
"gopkg.in/yaml.v3"
)
// NewPath tokenizes and parses a JSONPath expression, returning the compiled
// path or an error describing the first illegal token / parse failure.
func NewPath(input string, opts ...config.Option) (*JSONPath, error) {
	tokenizer := token.NewTokenizer(input, opts...)
	tokens := tokenizer.Tokenize()
	// Reject the expression up front if the tokenizer produced any illegal token.
	for i := 0; i < len(tokens); i++ {
		if tokens[i].Token == token.ILLEGAL {
			// "%s" prevents the tokenizer's message from being re-interpreted
			// as a format string (go vet: non-constant format string in Errorf).
			return nil, fmt.Errorf("%s", tokenizer.ErrorString(&tokens[i], "unexpected token"))
		}
	}
	parser := newParserPrivate(tokenizer, tokens, opts...)
	err := parser.parse()
	if err != nil {
		return nil, err
	}
	return parser, nil
}
// Query executes the compiled JSONPath against root and returns every
// matching node; the root is also used as the initial current node.
func (p *JSONPath) Query(root *yaml.Node) []*yaml.Node {
	return p.ast.Query(root, root)
}

// String renders the parsed expression back into JSONPath syntax.
// A nil receiver renders as the empty string.
func (p *JSONPath) String() string {
	if p != nil {
		return p.ast.ToString()
	}
	return ""
}

View file

@ -0,0 +1,729 @@
package jsonpath
import (
"errors"
"fmt"
"github.com/speakeasy-api/jsonpath/pkg/jsonpath/config"
"github.com/speakeasy-api/jsonpath/pkg/jsonpath/token"
"strconv"
"strings"
)
// MaxSafeFloat is 2^53-1, the largest integer exactly representable as an
// IEEE-754 double; indices beyond it are rejected by checkSafeInteger.
const MaxSafeFloat int64 = 9007199254740991

// mode tracks whether the parser is inside a singular query, where
// wildcards, slices and multi-selector segments are rejected.
type mode int

const (
	modeNormal mode = iota
	modeSingular
)

// JSONPath represents a JSONPath parser.
type JSONPath struct {
	tokenizer *token.Tokenizer  // source of positional error messages
	tokens    []token.TokenInfo // token stream being parsed
	ast       jsonPathAST       // accumulated parse result
	current   int               // index of the next token to consume
	mode      []mode            // stack; the top entry governs singular-query rules
	config    config.Config     // feature flags (e.g. "~" property-name extension)
}
// newParserPrivate creates a new JSONPath with the given tokens.
func newParserPrivate(tokenizer *token.Tokenizer, tokens []token.TokenInfo, opts ...config.Option) *JSONPath {
	// Keyed fields instead of a positional literal, so the constructor does
	// not silently misassign values if JSONPath's fields are reordered.
	return &JSONPath{
		tokenizer: tokenizer,
		tokens:    tokens,
		ast:       jsonPathAST{},
		current:   0,
		mode:      []mode{modeNormal},
		config:    config.New(opts...),
	}
}
// parse parses the JSONPath tokens and returns the root node of the AST.
//
// jsonpath-query = root-identifier segments
func (p *JSONPath) parse() error {
	// A valid query is non-empty and starts with the root identifier "$".
	if len(p.tokens) == 0 {
		return fmt.Errorf("empty JSONPath expression")
	}
	if p.tokens[p.current].Token != token.ROOT {
		return p.parseFailure(&p.tokens[p.current], "expected '$'")
	}
	p.current++
	// Consume segments until the token stream is exhausted; parseSegment
	// advances p.current on every success path.
	for p.current < len(p.tokens) {
		segment, err := p.parseSegment()
		if err != nil {
			return err
		}
		p.ast.segments = append(p.ast.segments, segment)
	}
	return nil
}
// parseFailure wraps msg with positional context from the tokenizer and
// returns it as an error pointing at the offending token.
func (p *JSONPath) parseFailure(target *token.TokenInfo, msg string) error {
	return errors.New(p.tokenizer.ErrorString(target, msg))
}
// peek returns true if the upcoming token (one past the current position)
// matches the given token type. It never consumes input.
func (p *JSONPath) peek(token token.Token) bool {
	return p.current+1 < len(p.tokens) && p.tokens[p.current+1].Token == token
}
// next returns true if the current (not yet consumed) token matches the
// given token type. Unlike peek, it inspects p.tokens[p.current] itself.
func (p *JSONPath) next(token token.Token) bool {
	return p.current < len(p.tokens) && p.tokens[p.current].Token == token
}
// expect consumes the current token if it matches the given token type.
// NOTE(review): this delegates to peek, so it advances only when the token
// AFTER the current one matches, leaving p.current on the matched token —
// confirm callers rely on this lookahead semantics rather than next's.
func (p *JSONPath) expect(token token.Token) bool {
	if p.peek(token) {
		p.current++
		return true
	}
	return false
}
// isComparisonOperator reports whether tok is one of the six comparison
// operators (==, !=, <, <=, >, >=).
func (p *JSONPath) isComparisonOperator(tok token.Token) bool {
	switch tok {
	case token.EQ, token.NE, token.GT, token.GE, token.LT, token.LE:
		return true
	default:
		return false
	}
}
// parseSegment parses one path segment starting at the current token:
// recursive descent (".."), a child segment ("." name or "[...]"), or —
// when enabled in config — the "~" property-name extension.
func (p *JSONPath) parseSegment() (*segment, error) {
	currentToken := p.tokens[p.current]
	if currentToken.Token == token.RECURSIVE {
		// Recursive descent can match many nodes, so it is illegal inside a
		// singular query.
		if p.mode[len(p.mode)-1] == modeSingular {
			return nil, p.parseFailure(&p.tokens[p.current], "unexpected recursive descent in singular query")
		}
		p.current++
		child, err := p.parseInnerSegment()
		if err != nil {
			return nil, err
		}
		return &segment{kind: segmentKindDescendant, descendant: child}, nil
	} else if currentToken.Token == token.CHILD || currentToken.Token == token.BRACKET_LEFT {
		// A leading "." is consumed here; "[" is left for parseInnerSegment.
		if currentToken.Token == token.CHILD {
			p.current++
		}
		child, err := p.parseInnerSegment()
		if err != nil {
			return nil, err
		}
		return &segment{kind: segmentKindChild, child: child}, nil
	} else if p.config.PropertyNameEnabled() && currentToken.Token == token.PROPERTY_NAME {
		// "~" selects the property name itself (outside RFC 9535).
		p.current++
		return &segment{kind: segmentKindProperyName}, nil
	}
	return nil, p.parseFailure(&currentToken, "unexpected token when parsing segment")
}
// parseInnerSegment parses the part of a segment after any "." or "..":
// a wildcard ("*"), a dotted member name, or a bracketed selector list.
// The deferred check enforces singular-query restrictions (no wildcard,
// no multi-selector) regardless of which branch produced the result.
func (p *JSONPath) parseInnerSegment() (retValue *innerSegment, err error) {
	defer func() {
		if p.mode[len(p.mode)-1] == modeSingular && retValue != nil {
			if len(retValue.selectors) > 1 {
				retValue = nil
				err = p.parseFailure(&p.tokens[p.current], "unexpected multiple selectors in singular query")
				return
			} else if retValue.kind == segmentDotWildcard {
				retValue = nil
				err = p.parseFailure(&p.tokens[p.current], "unexpected wildcard in singular query")
				return
			}
		}
	}()
	// .*
	// .STRING
	// []
	if p.current >= len(p.tokens) {
		return nil, p.parseFailure(nil, "unexpected end of input")
	}
	firstToken := p.tokens[p.current]
	if firstToken.Token == token.WILDCARD {
		p.current += 1
		return &innerSegment{segmentDotWildcard, "", nil}, nil
	} else if firstToken.Token == token.STRING {
		dotName := p.tokens[p.current].Literal
		p.current += 1
		return &innerSegment{segmentDotMemberName, dotName, nil}, nil
	} else if firstToken.Token == token.BRACKET_LEFT {
		// Remember where the bracketed list began so we can rewind on error.
		prior := p.current
		p.current += 1
		selectors := []*selector{}
		for p.current < len(p.tokens) {
			innerSelector, err := p.parseSelector()
			if err != nil {
				p.current = prior
				return nil, err
			}
			selectors = append(selectors, innerSelector)
			if len(p.tokens) <= p.current {
				return nil, p.parseFailure(&p.tokens[p.current-1], "unexpected end of input")
			}
			if p.tokens[p.current].Token == token.BRACKET_RIGHT {
				break
			} else if p.tokens[p.current].Token == token.COMMA {
				p.current++
			}
		}
		// Fix: input ending right after a trailing comma (e.g. "$[0,") used to
		// index one past the end of the token slice here.
		if p.current >= len(p.tokens) {
			return nil, p.parseFailure(&p.tokens[len(p.tokens)-1], "unexpected end of input")
		}
		if p.tokens[p.current].Token != token.BRACKET_RIGHT {
			// Fix: a dead `prior = p.current` assignment was removed here;
			// prior is never read after this point.
			return nil, p.parseFailure(&p.tokens[p.current], "expected ']'")
		}
		p.current += 1
		return &innerSegment{kind: segmentLongHand, dotName: "", selectors: selectors}, nil
	}
	return nil, p.parseFailure(&firstToken, "unexpected token when parsing inner segment")
}
// parseSelector parses a single bracketed selector. The deferred check
// rejects wildcard and slice selectors when inside a singular query.
func (p *JSONPath) parseSelector() (retSelector *selector, err error) {
	//selector = name-selector /
	//	wildcard-selector /
	//	slice-selector /
	//	index-selector /
	//	filter-selector
	initial := p.current
	defer func() {
		if p.mode[len(p.mode)-1] == modeSingular && retSelector != nil {
			if retSelector.kind == selectorSubKindWildcard {
				err = p.parseFailure(&p.tokens[initial], "unexpected wildcard in singular query")
				retSelector = nil
			} else if retSelector.kind == selectorSubKindArraySlice {
				err = p.parseFailure(&p.tokens[initial], "unexpected slice in singular query")
				retSelector = nil
			}
		}
	}()
	// name-selector = string-literal
	if p.tokens[p.current].Token == token.STRING_LITERAL {
		name := p.tokens[p.current].Literal
		p.current++
		return &selector{kind: selectorSubKindName, name: name}, nil
		// wildcard-selector = "*"
	} else if p.tokens[p.current].Token == token.WILDCARD {
		p.current++
		return &selector{kind: selectorSubKindWildcard}, nil
	} else if p.tokens[p.current].Token == token.INTEGER {
		// peek ahead to see if it's a slice
		if p.peek(token.ARRAY_SLICE) {
			slice, err := p.parseSliceSelector()
			if err != nil {
				return nil, err
			}
			return &selector{kind: selectorSubKindArraySlice, slice: slice}, nil
		}
		// peek ahead to see if we close the array index properly
		if !p.peek(token.BRACKET_RIGHT) && !p.peek(token.COMMA) {
			return nil, p.parseFailure(&p.tokens[p.current], "expected ']' or ','")
		}
		// else it's an index
		lit := p.tokens[p.current].Literal
		// make sure it's not -0 (disallowed as an index)
		if lit == "-0" {
			return nil, p.parseFailure(&p.tokens[p.current], "-0 unexpected")
		}
		// make sure lit is an integer
		i, err := strconv.ParseInt(lit, 10, 64)
		if err != nil {
			return nil, p.parseFailure(&p.tokens[p.current], "expected an integer")
		}
		// reject values outside the exactly-representable double range
		err = p.checkSafeInteger(i, lit)
		if err != nil {
			return nil, err
		}
		p.current++
		return &selector{kind: selectorSubKindArrayIndex, index: i}, nil
	} else if p.tokens[p.current].Token == token.ARRAY_SLICE {
		// slice with no start index, e.g. "[:3]"
		slice, err := p.parseSliceSelector()
		if err != nil {
			return nil, err
		}
		return &selector{kind: selectorSubKindArraySlice, slice: slice}, nil
	} else if p.tokens[p.current].Token == token.FILTER {
		return p.parseFilterSelector()
	}
	return nil, p.parseFailure(&p.tokens[p.current], "unexpected token when parsing selector")
}
// parseSliceSelector parses a slice and leaves p.current on the closing ']'
// (the caller consumes it). Each of start, end and step is optional and
// remains nil when absent.
// NOTE(review): this function indexes p.tokens[p.current] without bounds
// checks — it appears to assume the caller guarantees tokens remain;
// confirm against the tokenizer's output.
func (p *JSONPath) parseSliceSelector() (*slice, error) {
	// slice-selector = [start S] ":" S [end S] [":" [S step]]
	var start, end, step *int64
	// parse the start index
	if p.tokens[p.current].Token == token.INTEGER {
		literal := p.tokens[p.current].Literal
		i, err := strconv.ParseInt(literal, 10, 64)
		if err != nil {
			return nil, p.parseFailure(&p.tokens[p.current], "expected an integer")
		}
		err = p.checkSafeInteger(i, literal)
		if err != nil {
			return nil, err
		}
		start = &i
		p.current += 1
	}
	// Expect a colon
	if p.tokens[p.current].Token != token.ARRAY_SLICE {
		return nil, p.parseFailure(&p.tokens[p.current], "expected ':'")
	}
	p.current++
	// parse the end index
	if p.tokens[p.current].Token == token.INTEGER {
		literal := p.tokens[p.current].Literal
		i, err := strconv.ParseInt(literal, 10, 64)
		if err != nil {
			return nil, p.parseFailure(&p.tokens[p.current], "expected an integer")
		}
		err = p.checkSafeInteger(i, literal)
		if err != nil {
			return nil, err
		}
		end = &i
		p.current++
	}
	// Check for an optional second colon and step value
	if p.tokens[p.current].Token == token.ARRAY_SLICE {
		p.current++
		if p.tokens[p.current].Token == token.INTEGER {
			literal := p.tokens[p.current].Literal
			i, err := strconv.ParseInt(literal, 10, 64)
			if err != nil {
				return nil, p.parseFailure(&p.tokens[p.current], "expected an integer")
			}
			err = p.checkSafeInteger(i, literal)
			if err != nil {
				return nil, err
			}
			step = &i
			p.current++
		}
	}
	// The closing ']' is validated here but intentionally not consumed.
	if p.tokens[p.current].Token != token.BRACKET_RIGHT {
		return nil, p.parseFailure(&p.tokens[p.current], "expected ']'")
	}
	return &slice{start: start, end: end, step: step}, nil
}
// checkSafeInteger rejects indices outside the range of integers exactly
// representable as IEEE-754 doubles, and the literal "-0".
func (p *JSONPath) checkSafeInteger(i int64, literal string) error {
	switch {
	case i > MaxSafeFloat, i < -MaxSafeFloat:
		return p.parseFailure(&p.tokens[p.current], "outside bounds for safe integers")
	case literal == "-0":
		return p.parseFailure(&p.tokens[p.current], "-0 unexpected")
	default:
		return nil
	}
}
// parseFilterSelector parses filter-selector = "?" S logical-expr.
func (p *JSONPath) parseFilterSelector() (*selector, error) {
	if p.tokens[p.current].Token != token.FILTER {
		return nil, p.parseFailure(&p.tokens[p.current], "expected '?'")
	}
	p.current++ // consume "?"
	logicalOr, err := p.parseLogicalOrExpr()
	if err != nil {
		return nil, err
	}
	return &selector{kind: selectorSubKindFilter, filter: &filterSelector{logicalOr}}, nil
}
// parseLogicalOrExpr parses logical-or-expr = logical-and-expr *(S "||" S logical-and-expr).
func (p *JSONPath) parseLogicalOrExpr() (*logicalOrExpr, error) {
	result := &logicalOrExpr{}
	for {
		conjunct, err := p.parseLogicalAndExpr()
		if err != nil {
			return nil, err
		}
		result.expressions = append(result.expressions, conjunct)
		if !p.next(token.OR) {
			return result, nil
		}
		p.current++ // consume "||"
	}
}

// parseLogicalAndExpr parses logical-and-expr = basic-expr *(S "&&" S basic-expr).
func (p *JSONPath) parseLogicalAndExpr() (*logicalAndExpr, error) {
	result := &logicalAndExpr{}
	for {
		operand, err := p.parseBasicExpr()
		if err != nil {
			return nil, err
		}
		result.expressions = append(result.expressions, operand)
		if !p.next(token.AND) {
			return result, nil
		}
		p.current++ // consume "&&"
	}
}
// parseBasicExpr parses one basic expression. "!" and "(" are dispatched
// structurally; anything else is tried speculatively as a comparison first,
// then as a test expression, rewinding the token cursor between attempts.
func (p *JSONPath) parseBasicExpr() (*basicExpr, error) {
	//basic-expr = paren-expr /
	//	comparison-expr /
	//	test-expr
	switch p.tokens[p.current].Token {
	case token.NOT:
		p.current++
		expr, err := p.parseLogicalOrExpr()
		if err != nil {
			return nil, err
		}
		// Inspect if the expr is topped by a parenExpr -- if so we can simplify
		// by folding the "!" into that parenExpr instead of nesting another one.
		if len(expr.expressions) == 1 && len(expr.expressions[0].expressions) == 1 && expr.expressions[0].expressions[0].parenExpr != nil {
			child := expr.expressions[0].expressions[0].parenExpr
			child.not = !child.not
			return &basicExpr{parenExpr: child}, nil
		}
		return &basicExpr{parenExpr: &parenExpr{not: true, expr: expr}}, nil
	case token.PAREN_LEFT:
		p.current++
		expr, err := p.parseLogicalOrExpr()
		if err != nil {
			return nil, err
		}
		if p.tokens[p.current].Token != token.PAREN_RIGHT {
			return nil, p.parseFailure(&p.tokens[p.current], "expected ')'")
		}
		p.current++
		return &basicExpr{parenExpr: &parenExpr{not: false, expr: expr}}, nil
	}
	// Backtracking: save the cursor, try a comparison, restore and try a test.
	prevCurrent := p.current
	comparisonExpr, comparisonErr := p.parseComparisonExpr()
	if comparisonErr == nil {
		return &basicExpr{comparisonExpr: comparisonExpr}, nil
	}
	p.current = prevCurrent
	testExpr, testErr := p.parseTestExpr()
	if testErr == nil {
		return &basicExpr{testExpr: testExpr}, nil
	}
	p.current = prevCurrent
	return nil, p.parseFailure(&p.tokens[p.current], fmt.Sprintf("could not parse query: expected either testExpr [err: %s] or comparisonExpr: [err: %s]", testErr.Error(), comparisonErr.Error()))
}
// parseComparisonExpr parses: comparable comparison-op comparable.
func (p *JSONPath) parseComparisonExpr() (*comparisonExpr, error) {
	left, err := p.parseComparable()
	if err != nil {
		return nil, err
	}
	if !p.isComparisonOperator(p.tokens[p.current].Token) {
		return nil, p.parseFailure(&p.tokens[p.current], "expected comparison operator")
	}
	// map each operator token onto its comparison operator
	operators := map[token.Token]comparisonOperator{
		token.EQ: equalTo,
		token.NE: notEqualTo,
		token.LT: lessThan,
		token.LE: lessThanEqualTo,
		token.GT: greaterThan,
		token.GE: greaterThanEqualTo,
	}
	op, ok := operators[p.tokens[p.current].Token]
	if !ok {
		return nil, p.parseFailure(&p.tokens[p.current], "expected comparison operator")
	}
	p.current++
	right, err := p.parseComparable()
	if err != nil {
		return nil, err
	}
	return &comparisonExpr{left: left, op: op, right: right}, nil
}
// parseComparable parses one side of a comparison:
//
//	comparable = literal /
//	             singular-query / ; singular query value
//	             function-expr    ; ValueType
//
// match() and search() return a logical result and are therefore rejected
// as comparison operands. NOTE(review): failed parseLiteral /
// parseFunctionExpr attempts are not rolled back here; this assumes they
// do not advance p.current on failure — confirm against those parsers.
func (p *JSONPath) parseComparable() (*comparable, error) {
	if literal, err := p.parseLiteral(); err == nil {
		return &comparable{literal: literal}, nil
	}
	if funcExpr, err := p.parseFunctionExpr(); err == nil {
		if funcExpr.funcType == functionTypeMatch {
			return nil, p.parseFailure(&p.tokens[p.current], "match result cannot be compared")
		} else if funcExpr.funcType == functionTypeSearch {
			return nil, p.parseFailure(&p.tokens[p.current], "search result cannot be compared")
		}
		return &comparable{functionExpr: funcExpr}, nil
	}
	switch p.tokens[p.current].Token {
	case token.ROOT:
		// absolute singular query: "$" segments
		p.current++
		query, err := p.parseSingleQuery()
		if err != nil {
			return nil, err
		}
		return &comparable{singularQuery: &singularQuery{absQuery: &absQuery{segments: query.segments}}}, nil
	case token.CURRENT:
		// relative singular query: "@" segments
		p.current++
		query, err := p.parseSingleQuery()
		if err != nil {
			return nil, err
		}
		return &comparable{singularQuery: &singularQuery{relQuery: &relQuery{segments: query.segments}}}, nil
	default:
		return nil, p.parseFailure(&p.tokens[p.current], "expected literal or query")
	}
}
// parseQuery parses consecutive segments in normal mode until one fails;
// the failing segment's tokens are left unconsumed and no error is
// returned (trailing tokens are for the caller to judge).
func (p *JSONPath) parseQuery() (*jsonPathAST, error) {
	ast := jsonPathAST{}
	p.mode = append(p.mode, modeNormal)
	defer func() { p.mode = p.mode[:len(p.mode)-1] }()
	for p.current < len(p.tokens) {
		checkpoint := p.current
		seg, err := p.parseSegment()
		if err != nil {
			// rollback to before the failed segment and stop
			p.current = checkpoint
			break
		}
		ast.segments = append(ast.segments, seg)
	}
	return &ast, nil
}
// parseTestExpr parses a test expression inside a filter:
//
//	test-expr    = [logical-not-op S] (filter-query / function-expr)
//	filter-query = rel-query / jsonpath-query
//	rel-query    = current-node-identifier segments
//
// Value-returning functions (count, length, value) are rejected here: a
// bare test requires a logical or nodes result, so they must appear in a
// comparison instead.
func (p *JSONPath) parseTestExpr() (*testExpr, error) {
	not := false
	if p.tokens[p.current].Token == token.NOT {
		not = true
		p.current++
	}
	switch p.tokens[p.current].Token {
	case token.CURRENT:
		// relative query rooted at the current node "@"
		p.current++
		query, err := p.parseQuery()
		if err != nil {
			return nil, err
		}
		return &testExpr{filterQuery: &filterQuery{relQuery: &relQuery{segments: query.segments}}, not: not}, nil
	case token.ROOT:
		// absolute query rooted at "$"
		p.current++
		query, err := p.parseQuery()
		if err != nil {
			return nil, err
		}
		return &testExpr{filterQuery: &filterQuery{jsonPathQuery: &jsonPathAST{segments: query.segments}}, not: not}, nil
	default:
		funcExpr, err := p.parseFunctionExpr()
		if err != nil {
			return nil, err
		}
		switch funcExpr.funcType {
		case functionTypeCount:
			return nil, p.parseFailure(&p.tokens[p.current], "count function must be compared")
		case functionTypeLength:
			return nil, p.parseFailure(&p.tokens[p.current], "length function must be compared")
		case functionTypeValue:
			// fixed: previously reported "length function must be compared"
			// for the value() function
			return nil, p.parseFailure(&p.tokens[p.current], "value function must be compared")
		}
		return &testExpr{functionExpr: funcExpr, not: not}, nil
	}
	// note: the former trailing return after this switch was unreachable
	// (every branch returns) and has been removed
}
// parseFunctionExpr parses a function call starting at its name token:
//
//	function-expr = function-name "(" [function-argument *(S "," function-argument)] ")"
//
// length() takes a single singular-query argument; count() and value()
// take a single query argument; match() and search() take two arguments.
// NOTE(review): a name absent from functionTypeMap yields the zero
// functionType and falls through the switch with no arguments parsed —
// confirm callers only reach this with tokenizer-classified FUNCTION names.
func (p *JSONPath) parseFunctionExpr() (*functionExpr, error) {
	functionName := p.tokens[p.current].Literal
	if p.current+1 >= len(p.tokens) || p.tokens[p.current+1].Token != token.PAREN_LEFT {
		return nil, p.parseFailure(&p.tokens[p.current], "expected '(' after function")
	}
	// skip the function name and the opening parenthesis
	p.current += 2
	args := []*functionArgument{}
	switch functionTypeMap[functionName] {
	case functionTypeLength:
		// length() requires a singular query argument
		arg, err := p.parseFunctionArgument(true)
		if err != nil {
			return nil, err
		}
		args = append(args, arg)
	case functionTypeCount:
		arg, err := p.parseFunctionArgument(false)
		if err != nil {
			return nil, err
		}
		// a literal argument must carry a node (container) to be countable
		if arg.literal != nil && arg.literal.node == nil {
			return nil, p.parseFailure(&p.tokens[p.current], "count function only supports containers")
		}
		args = append(args, arg)
	case functionTypeValue:
		arg, err := p.parseFunctionArgument(false)
		if err != nil {
			return nil, err
		}
		args = append(args, arg)
	case functionTypeMatch:
		fallthrough
	case functionTypeSearch:
		// match()/search() take an input argument and a regexp argument
		arg, err := p.parseFunctionArgument(false)
		if err != nil {
			return nil, err
		}
		args = append(args, arg)
		if p.tokens[p.current].Token != token.COMMA {
			return nil, p.parseFailure(&p.tokens[p.current], "expected ','")
		}
		p.current++
		arg, err = p.parseFunctionArgument(false)
		if err != nil {
			return nil, err
		}
		args = append(args, arg)
	}
	if p.tokens[p.current].Token != token.PAREN_RIGHT {
		return nil, p.parseFailure(&p.tokens[p.current], "expected ')'")
	}
	p.current++
	return &functionExpr{funcType: functionTypeMap[functionName], args: args}, nil
}
// parseSingleQuery parses consecutive segments in singular mode (only
// name and index selectors permitted) until one fails, rolling back the
// failed segment's tokens. An empty query is not an error.
func (p *JSONPath) parseSingleQuery() (*jsonPathAST, error) {
	ast := jsonPathAST{}
	for p.current < len(p.tokens) {
		checkpoint := p.current
		p.mode = append(p.mode, modeSingular)
		seg, err := p.parseSegment()
		p.mode = p.mode[:len(p.mode)-1]
		if err != nil {
			// rollback to before the failed segment and stop
			p.current = checkpoint
			break
		}
		ast.segments = append(ast.segments, seg)
	}
	return &ast, nil
}
// parseFunctionArgument parses one argument of a function expression:
//
//	function-argument = literal /
//	                    filter-query / ; (includes singular-query)
//	                    logical-expr /
//	                    function-expr
//
// When single is true, query arguments are restricted to singular queries.
// NOTE(review): failed sub-parses outside the "@"/"$" branches are not
// rolled back here; this assumes they restore or do not advance p.current
// on failure — verify against those parsers.
func (p *JSONPath) parseFunctionArgument(single bool) (*functionArgument, error) {
	if lit, err := p.parseLiteral(); err == nil {
		return &functionArgument{literal: lit}, nil
	}
	switch p.tokens[p.current].Token {
	case token.CURRENT:
		// relative query argument rooted at "@"
		p.current++
		var query *jsonPathAST
		var err error
		if single {
			query, err = p.parseSingleQuery()
		} else {
			query, err = p.parseQuery()
		}
		if err != nil {
			return nil, err
		}
		return &functionArgument{filterQuery: &filterQuery{relQuery: &relQuery{segments: query.segments}}}, nil
	case token.ROOT:
		// absolute query argument rooted at "$"
		p.current++
		var query *jsonPathAST
		var err error
		if single {
			query, err = p.parseSingleQuery()
		} else {
			query, err = p.parseQuery()
		}
		if err != nil {
			return nil, err
		}
		return &functionArgument{filterQuery: &filterQuery{jsonPathQuery: &jsonPathAST{segments: query.segments}}}, nil
	}
	if expr, err := p.parseLogicalOrExpr(); err == nil {
		return &functionArgument{logicalExpr: expr}, nil
	}
	if funcExpr, err := p.parseFunctionExpr(); err == nil {
		return &functionArgument{functionExpr: funcExpr}, nil
	}
	return nil, p.parseFailure(&p.tokens[p.current], "unexpected token for function argument")
}
// parseLiteral parses a literal token: a quoted string, integer, float,
// true, false or null. The token is consumed before numeric conversion,
// matching the original behavior (a conversion failure reports the
// following token).
func (p *JSONPath) parseLiteral() (*literal, error) {
	tok := p.tokens[p.current]
	switch tok.Token {
	case token.STRING_LITERAL:
		p.current++
		s := tok.Literal
		return &literal{string: &s}, nil
	case token.INTEGER:
		p.current++
		n, err := strconv.Atoi(tok.Literal)
		if err != nil {
			return nil, p.parseFailure(&p.tokens[p.current], "expected integer")
		}
		return &literal{integer: &n}, nil
	case token.FLOAT:
		p.current++
		f, err := strconv.ParseFloat(tok.Literal, 64)
		if err != nil {
			return nil, p.parseFailure(&p.tokens[p.current], "expected float")
		}
		return &literal{float64: &f}, nil
	case token.TRUE:
		p.current++
		v := true
		return &literal{bool: &v}, nil
	case token.FALSE:
		p.current++
		v := false
		return &literal{bool: &v}, nil
	case token.NULL:
		p.current++
		v := true
		return &literal{null: &v}, nil
	}
	return nil, p.parseFailure(&p.tokens[p.current], "expected literal")
}
// jsonPathAST is the parsed form of a complete JSONPath query: the root
// identifier "$" followed by zero or more segments.
type jsonPathAST struct {
	// "$"
	segments []*segment
}
// ToString renders the query back to JSONPath text, starting with the
// root identifier "$".
func (q jsonPathAST) ToString() string {
	var sb strings.Builder
	sb.WriteString("$")
	for _, s := range q.segments {
		sb.WriteString(s.ToString())
	}
	return sb.String()
}

View file

@ -0,0 +1,83 @@
package jsonpath
import (
"gopkg.in/yaml.v3"
"strings"
)
// segmentKind distinguishes the three top-level segment forms.
type segmentKind int

const (
	segmentKindChild       segmentKind = iota // "." child segment
	segmentKindDescendant                     // ".." descendant segment
	segmentKindProperyName                    // ~ (extension only)
)
// segment is one step of a JSONPath query. kind selects which of the
// inner-segment fields is populated; a property-name segment ("~")
// carries neither.
type segment struct {
	kind       segmentKind
	child      *innerSegment // payload when kind == segmentKindChild
	descendant *innerSegment // payload when kind == segmentKindDescendant
}
// segmentSubKind distinguishes the forms an inner segment can take.
type segmentSubKind int

const (
	segmentDotWildcard   segmentSubKind = iota // .*
	segmentDotMemberName                       // .property
	segmentLongHand                            // [ selector[] ]
)
// ToString renders the segment back to its JSONPath text form.
func (s segment) ToString() string {
	switch s.kind {
	case segmentKindChild:
		inner := s.child.ToString()
		if s.child.kind == segmentLongHand {
			// bracketed selections already carry their "[" prefix
			return inner
		}
		return "." + inner
	case segmentKindDescendant:
		return ".." + s.descendant.ToString()
	case segmentKindProperyName:
		return "~"
	}
	panic("unknown segment kind")
}
// innerSegment is the payload of a child or descendant segment: either a
// dotted form (wildcard or member name) or a long-hand bracketed
// selector list.
type innerSegment struct {
	kind      segmentSubKind
	dotName   string      // member name when kind == segmentDotMemberName
	selectors []*selector // populated when kind == segmentLongHand
}
// ToString renders the inner segment: "*", a dotted member name, or a
// bracketed selector list such as "['a', 1, *]".
// (The redundant `break` statements were removed: Go switch cases do not
// fall through, so they were no-ops flagged by linters.)
func (s innerSegment) ToString() string {
	builder := strings.Builder{}
	switch s.kind {
	case segmentDotWildcard:
		builder.WriteString("*")
	case segmentDotMemberName:
		builder.WriteString(s.dotName)
	case segmentLongHand:
		builder.WriteString("[")
		for i, sel := range s.selectors {
			builder.WriteString(sel.ToString())
			if i < len(s.selectors)-1 {
				builder.WriteString(", ")
			}
		}
		builder.WriteString("]")
	default:
		panic("unknown child segment kind")
	}
	return builder.String()
}
// descend returns value followed by every node reachable beneath it, in
// document order. root is carried through unchanged for recursive calls.
func descend(value *yaml.Node, root *yaml.Node) []*yaml.Node {
	out := []*yaml.Node{value}
	for i := range value.Content {
		out = append(out, descend(value.Content[i], root)...)
	}
	return out
}

View file

@ -0,0 +1,63 @@
package jsonpath
import (
"fmt"
"strconv"
"strings"
)
// selectorSubKind distinguishes the five selector forms of a bracketed
// selection (name, wildcard, slice, index, filter).
type selectorSubKind int

const (
	selectorSubKindWildcard selectorSubKind = iota // *
	selectorSubKindName                            // 'name'
	selectorSubKindArraySlice                      // start:end:step
	selectorSubKindArrayIndex                      // integer index
	selectorSubKindFilter                          // ?expr
)
// slice holds the optional start:end:step bounds of an array-slice
// selector; a nil field means that bound was omitted.
type slice struct {
	start *int64
	end   *int64
	step  *int64
}
// selector is a single bracketed selector; kind determines which of the
// remaining fields carries the payload.
type selector struct {
	kind   selectorSubKind
	name   string          // name selector
	index  int64           // array-index selector
	slice  *slice          // array-slice selector
	filter *filterSelector // filter selector
}
// ToString renders the selector back to its bracketed JSONPath text form.
// (The trailing `return ""` was removed: the switch is terminating —
// every case returns and the default panics — so it was unreachable.)
func (s selector) ToString() string {
	switch s.kind {
	case selectorSubKindName:
		// name selectors are single-quoted with escaping applied
		return "'" + escapeString(s.name) + "'"
	case selectorSubKindArrayIndex:
		return strconv.FormatInt(s.index, 10)
	case selectorSubKindFilter:
		return "?" + s.filter.ToString()
	case selectorSubKindWildcard:
		return "*"
	case selectorSubKindArraySlice:
		// omitted bounds render as empty; the step (and its ":") only
		// appears when present
		builder := strings.Builder{}
		if s.slice.start != nil {
			builder.WriteString(strconv.FormatInt(*s.slice.start, 10))
		}
		builder.WriteString(":")
		if s.slice.end != nil {
			builder.WriteString(strconv.FormatInt(*s.slice.end, 10))
		}
		if s.slice.step != nil {
			builder.WriteString(":")
			builder.WriteString(strconv.FormatInt(*s.slice.step, 10))
		}
		return builder.String()
	default:
		panic(fmt.Sprintf("unimplemented selector kind: %v", s.kind))
	}
}

View file

@ -0,0 +1,776 @@
package token
import (
"fmt"
"github.com/speakeasy-api/jsonpath/pkg/jsonpath/config"
"strconv"
"strings"
)
// *****************************************************************************
// The Tokenizer is responsible for tokenizing the jsonpath expression. This means
// * removing whitespace
// * scanning strings
// * and detecting illegal characters
// *****************************************************************************
// Token represents a lexical token in a JSONPath expression. The zero
// value is ILLEGAL.
type Token int
// We are allowed the following tokens
//jsonpath-query = root-identifier segments
//segments = *(segment)
//root-identifier = "$"
//selector = name-selector /
// wildcard-selector /
// slice-selector /
// index-selector /
// filter-selector
//name-selector = string-literal
//wildcard-selector = "*"
//index-selector = int ; decimal integer
//
//int = "0" /
// (["-"] DIGIT1 *DIGIT) ; - optional
//DIGIT1 = %x31-39 ; 1-9 non-zero digit
//slice-selector = [start] ":" [end] [":" [step]]
//
//start = int ; included in selection
//end = int ; not included in selection
//step = int ; default: 1
//filter-selector = "?" logical-expr
//logical-expr = logical-or-expr
//logical-or-expr = logical-and-expr *("||" logical-and-expr)
// ; disjunction
// ; binds less tightly than conjunction
//logical-and-expr = basic-expr *("&&" basic-expr)
// ; conjunction
// ; binds more tightly than disjunction
//
//basic-expr = paren-expr /
// comparison-expr /
// test-expr
//
//paren-expr = [logical-not-op] "(" logical-expr ")"
// ; parenthesized expression
//logical-not-op = "!" ; logical NOT operator
//test-expr = [logical-not-op S]
// (filter-query / ; existence/non-existence
// function-expr) ; LogicalType or NodesType
//filter-query = rel-query / jsonpath-query
//rel-query = current-node-identifier segments
//current-node-identifier = "@"
//comparison-expr = comparable comparison-op comparable
//literal = number / string-literal /
// true / false / null
//comparable = literal /
// singular-query / ; singular query value
// function-expr ; ValueType
//comparison-op = "==" / "!=" /
// "<=" / ">=" /
// "<" / ">"
//
//singular-query = rel-singular-query / abs-singular-query
//rel-singular-query = current-node-identifier singular-query-segments
//abs-singular-query = root-identifier singular-query-segments
//singular-query-segments = *(S (name-segment / index-segment))
//name-segment = ("[" name-selector "]") /
// ("." member-name-shorthand)
//index-segment = "[" index-selector "]"
//number = (int / "-0") [ frac ] [ exp ] ; decimal number
//frac = "." 1*DIGIT ; decimal fraction
//exp = "e" [ "-" / "+" ] 1*DIGIT ; decimal exponent
//true = %x74.72.75.65 ; true
//false = %x66.61.6c.73.65 ; false
//null = %x6e.75.6c.6c ; null
//function-name = function-name-first *function-name-char
//function-name-first = LCALPHA
//function-name-char = function-name-first / "_" / DIGIT
//LCALPHA = %x61-7A ; "a".."z"
//
//function-expr = function-name "(" [function-argument
// *(S "," function-argument)] ")"
//function-argument = literal /
// filter-query / ; (includes singular-query)
// logical-expr /
// function-expr
//segment = child-segment / descendant-segment
//child-segment = bracketed-selection /
// ("."
// (wildcard-selector /
// member-name-shorthand))
//
//bracketed-selection = "[" selector *(S "," selector) "]"
//
//member-name-shorthand = name-first *name-char
//name-first = ALPHA /
// "_" /
// %x80-D7FF /
// ; skip surrogate code points
// %xE000-10FFFF
//name-char = name-first / DIGIT
//
//DIGIT = %x30-39 ; 0-9
//ALPHA = %x41-5A / %x61-7A ; A-Z / a-z
//descendant-segment = ".." (bracketed-selection /
// wildcard-selector /
// member-name-shorthand)
//
// Figure 2: Collected ABNF of JSONPath Queries
//
//Figure 3 contains the collected ABNF grammar that defines the syntax
//of a JSONPath Normalized Path while also using the rules root-
//identifier, ESC, DIGIT, and DIGIT1 from Figure 2.
//
//normalized-path = root-identifier *(normal-index-segment)
//normal-index-segment = "[" normal-selector "]"
//normal-selector = normal-name-selector / normal-index-selector
//normal-name-selector = %x27 *normal-single-quoted %x27 ; 'string'
//normal-single-quoted = normal-unescaped /
// ESC normal-escapable
//normal-unescaped = ; omit %x0-1F control codes
// %x20-26 /
// ; omit 0x27 '
// %x28-5B /
// ; omit 0x5C \
// %x5D-D7FF /
// ; skip surrogate code points
// %xE000-10FFFF
//
//normal-escapable = %x62 / ; b BS backspace U+0008
// %x66 / ; f FF form feed U+000C
// %x6E / ; n LF line feed U+000A
// %x72 / ; r CR carriage return U+000D
// %x74 / ; t HT horizontal tab U+0009
// "'" / ; ' apostrophe U+0027
// "\" / ; \ backslash (reverse solidus) U+005C
// (%x75 normal-hexchar)
// ; certain values u00xx U+00XX
//normal-hexchar = "0" "0"
// (
// ("0" %x30-37) / ; "00"-"07"
// ; omit U+0008-U+000A BS HT LF
// ("0" %x62) / ; "0b"
// ; omit U+000C-U+000D FF CR
// ("0" %x65-66) / ; "0e"-"0f"
// ("1" normal-HEXDIG)
// )
//normal-HEXDIG = DIGIT / %x61-66 ; "0"-"9", "a"-"f"
//normal-index-selector = "0" / (DIGIT1 *DIGIT)
// ; non-negative decimal integer
// The list of tokens.
const (
	ILLEGAL Token = iota // invalid or unexpected input
	STRING               // bare word (member-name shorthand)
	INTEGER
	FLOAT
	STRING_LITERAL // quoted string
	TRUE
	FALSE
	NULL
	ROOT          // $
	CURRENT       // @ (only valid inside filters)
	WILDCARD      // *
	PROPERTY_NAME // ~ (extension only)
	RECURSIVE     // ..
	CHILD         // .
	ARRAY_SLICE   // :
	FILTER        // ?
	PAREN_LEFT
	PAREN_RIGHT
	BRACKET_LEFT
	BRACKET_RIGHT
	COMMA
	TILDE
	AND // &&
	OR  // ||
	NOT // !
	EQ  // ==
	NE  // !=
	GT  // >
	GE  // >=
	LT  // <
	LE  // <=
	MATCHES // =~
	FUNCTION
)
// SimpleTokens is the allow-list used by Tokens.IsSimple: the token
// kinds permitted in a plain root-based path (no filters, slices,
// wildcards or functions).
var SimpleTokens = [...]Token{
	STRING,
	INTEGER,
	STRING_LITERAL,
	CHILD,
	BRACKET_LEFT,
	BRACKET_RIGHT,
	ROOT,
}
// tokens maps each Token to its display text, used by Token.String and
// by error rendering.
var tokens = [...]string{
	ILLEGAL:        "ILLEGAL",
	STRING:         "STRING",
	INTEGER:        "INTEGER",
	FLOAT:          "FLOAT",
	STRING_LITERAL: "STRING_LITERAL",
	TRUE:           "TRUE",
	FALSE:          "FALSE",
	NULL:           "NULL",
	// root node identifier (Section 2.2)
	ROOT: "$",
	// current node identifier (Section 2.3.5)
	// (valid only within filter selectors)
	CURRENT:   "@",
	WILDCARD:  "*",
	RECURSIVE: "..",
	CHILD:     ".",
	// start:end:step array slice operator (Section 2.3.4)
	ARRAY_SLICE: ":",
	// filter selector (Section 2.3.5): selects
	// particular children using a logical
	// expression
	FILTER:        "?",
	PAREN_LEFT:    "(",
	PAREN_RIGHT:   ")",
	BRACKET_LEFT:  "[",
	BRACKET_RIGHT: "]",
	COMMA:         ",",
	TILDE:         "~",
	AND:           "&&",
	OR:            "||",
	NOT:           "!",
	EQ:            "==",
	NE:            "!=",
	GT:            ">",
	GE:            ">=",
	LT:            "<",
	LE:            "<=",
	MATCHES:       "=~",
	FUNCTION:      "FUNCTION",
}
// String returns the textual representation of the token, or a numeric
// fallback for values outside the known range.
func (tok Token) String() string {
	if tok < 0 || tok >= Token(len(tokens)) {
		return "token(" + strconv.Itoa(int(tok)) + ")"
	}
	return tokens[tok]
}
// IsSimple reports whether the token stream is a plain root-based path:
// it must begin with "$" and contain only tokens from SimpleTokens.
func (tok Tokens) IsSimple() bool {
	if len(tok) == 0 || tok[0].Token != ROOT {
		return false
	}
	for _, info := range tok {
		found := false
		for _, simple := range SimpleTokens {
			if info.Token == simple {
				found = true
				break
			}
		}
		if !found {
			return false
		}
	}
	return true
}
// ErrorString renders a caret-style error message pointing at the target
// token within its source line. If target is nil, the position just past
// the last token is used.
func (t Tokenizer) ErrorString(target *TokenInfo, msg string) string {
	var errorBuilder strings.Builder
	var token TokenInfo
	if target == nil {
		// grab last token (as value) and point just past it
		token = t.tokens[len(t.tokens)-1]
		token.Column++
		target = &token
	}
	// Write the error message with line and column information
	errorBuilder.WriteString(fmt.Sprintf("Error at line %d, column %d: %s\n", target.Line, target.Column, msg))
	// Locate the start of the line containing the target token by walking
	// past target.Line-1 newlines. (The previous implementation searched
	// the zero-length prefix t.input[:lineStart] and never advanced, so
	// any token beyond line 1 rendered against the wrong line.)
	lineStart := 0
	for i := 1; i < target.Line; i++ {
		pos := strings.IndexByte(t.input[lineStart:], '\n')
		if pos == -1 {
			break
		}
		lineStart += pos + 1
	}
	lineEnd := len(t.input)
	if pos := strings.IndexByte(t.input[lineStart:], '\n'); pos != -1 {
		lineEnd = lineStart + pos
	}
	// Extract the line containing the target token
	line := t.input[lineStart:lineEnd]
	errorBuilder.WriteString(line)
	errorBuilder.WriteString("\n")
	// Write a caret (plus trailing dots for multi-character tokens)
	// under the target column
	spaces := strings.Repeat(" ", target.Column)
	errorBuilder.WriteString(spaces)
	dots := ""
	if target.Len > 0 {
		dots = strings.Repeat(".", target.Len-1)
	}
	errorBuilder.WriteString("^" + dots + "\n")
	return errorBuilder.String()
}
// ErrorTokenString renders the ErrorString for target followed by a
// per-token caret map of every token produced — useful when debugging
// tokenization.
func (t Tokenizer) ErrorTokenString(target *TokenInfo, msg string) string {
	var errorBuilder strings.Builder
	var fallback TokenInfo
	if target == nil {
		// grab last token (as value) and point just past it
		fallback = t.tokens[len(t.tokens)-1]
		fallback.Column++
		target = &fallback
	}
	// Write the error message with line and column information
	errorBuilder.WriteString(t.ErrorString(target, msg))
	// Locate the start of the line containing the target token by walking
	// past target.Line-1 newlines. (The previous implementation searched
	// the zero-length prefix t.input[:lineStart] and never advanced, so
	// any token beyond line 1 rendered against the wrong line.)
	lineStart := 0
	for i := 1; i < target.Line; i++ {
		pos := strings.IndexByte(t.input[lineStart:], '\n')
		if pos == -1 {
			break
		}
		lineStart += pos + 1
	}
	lineEnd := len(t.input)
	if pos := strings.IndexByte(t.input[lineStart:], '\n'); pos != -1 {
		lineEnd = lineStart + pos
	}
	// Extract the line containing the target token
	line := t.input[lineStart:lineEnd]
	// Render a caret line for every token (loop variable renamed from
	// "token" to avoid shadowing the outer variable)
	for _, info := range t.tokens {
		errorBuilder.WriteString(line)
		errorBuilder.WriteString("\n")
		spaces := strings.Repeat(" ", info.Column)
		dots := ""
		if info.Len > 0 {
			dots = strings.Repeat(".", info.Len-1)
		}
		errorBuilder.WriteString(spaces)
		errorBuilder.WriteString(fmt.Sprintf("^%s %s\n", dots, tokens[info.Token]))
	}
	return errorBuilder.String()
}
// TokenInfo represents a token and its associated information: its
// position (Line starts at 1, Column at 0), the source literal for
// value-carrying tokens, and its length in the input (used when
// rendering caret markers in error messages).
type TokenInfo struct {
	Token   Token
	Line    int
	Column  int
	Literal string
	Len     int
}
// Tokens represents the list of tokens produced by a Tokenizer.
type Tokens []TokenInfo
// Tokenizer represents a JSONPath tokenizer. It scans input in a single
// pass, tracking position (pos/line/column), a balance stack for "(" and
// "[", and whether whitespace is currently illegal (immediately after
// "." or "..", or between a function name and its "(").
type Tokenizer struct {
	input             string
	pos               int
	line              int
	column            int
	tokens            []TokenInfo
	stack             []Token // open ( and [ awaiting their match
	illegalWhitespace bool
	config            config.Config
}
// NewTokenizer creates a new JSONPath tokenizer for the given input
// string, applying any configuration options.
func NewTokenizer(input string, opts ...config.Option) *Tokenizer {
	t := &Tokenizer{
		input: input,
		line:  1,
		stack: make([]Token, 0),
	}
	t.config = config.New(opts...)
	return t
}
// Tokenize tokenizes the input string and returns a slice of TokenInfo.
// It is a single left-to-right pass: whitespace is skipped (except
// immediately after "."/".." where it is illegal), two-character
// operators are recognized by peeking one byte ahead, and strings,
// numbers and bare words are delegated to the scan* helpers. Unbalanced
// "(" / "[" produce a trailing ILLEGAL token.
func (t *Tokenizer) Tokenize() Tokens {
	for t.pos < len(t.input) {
		if !t.illegalWhitespace {
			t.skipWhitespace()
		}
		if t.pos >= len(t.input) {
			break
		}
		switch ch := t.input[t.pos]; {
		case ch == '$':
			t.addToken(ROOT, 1, "")
		case ch == '@':
			t.addToken(CURRENT, 1, "")
		case ch == '*':
			t.addToken(WILDCARD, 1, "")
		case ch == '~':
			// "~" is a non-standard extension and must be enabled via config
			if t.config.PropertyNameEnabled() {
				t.addToken(PROPERTY_NAME, 1, "")
			} else {
				t.addToken(ILLEGAL, 1, "invalid property name token without config.PropertyNameExtension set to true")
			}
		case ch == '.':
			// ".." is descendant, "." is child; whitespace may not follow either
			if t.peek() == '.' {
				t.addToken(RECURSIVE, 2, "")
				t.pos++
				t.column++
				t.illegalWhitespace = true
			} else {
				t.addToken(CHILD, 1, "")
				t.illegalWhitespace = true
			}
		case ch == ',':
			t.addToken(COMMA, 1, "")
		case ch == ':':
			t.addToken(ARRAY_SLICE, 1, "")
		case ch == '?':
			t.addToken(FILTER, 1, "")
		case ch == '(':
			t.addToken(PAREN_LEFT, 1, "")
			t.stack = append(t.stack, PAREN_LEFT)
		case ch == ')':
			t.addToken(PAREN_RIGHT, 1, "")
			if len(t.stack) > 0 && t.stack[len(t.stack)-1] == PAREN_LEFT {
				t.stack = t.stack[:len(t.stack)-1]
			} else {
				t.addToken(ILLEGAL, 1, "unmatched closing parenthesis")
			}
		case ch == '[':
			t.addToken(BRACKET_LEFT, 1, "")
			t.stack = append(t.stack, BRACKET_LEFT)
		case ch == ']':
			if len(t.stack) > 0 && t.stack[len(t.stack)-1] == BRACKET_LEFT {
				t.addToken(BRACKET_RIGHT, 1, "")
				t.stack = t.stack[:len(t.stack)-1]
			} else {
				t.addToken(ILLEGAL, 1, "unmatched closing bracket")
			}
		case ch == '&':
			// only "&&" is valid; a lone "&" is illegal
			if t.peek() == '&' {
				t.addToken(AND, 2, "")
				t.pos++
				t.column++
			} else {
				t.addToken(ILLEGAL, 1, "invalid token")
			}
		case ch == '|':
			// only "||" is valid; a lone "|" is illegal
			if t.peek() == '|' {
				t.addToken(OR, 2, "")
				t.pos++
				t.column++
			} else {
				t.addToken(ILLEGAL, 1, "invalid token")
			}
		case ch == '!':
			if t.peek() == '=' {
				t.addToken(NE, 2, "")
				t.pos++
				t.column++
			} else {
				t.addToken(NOT, 1, "")
			}
		case ch == '=':
			// "==" (equality) or "=~" (regex match); a lone "=" is illegal
			if t.peek() == '=' {
				t.addToken(EQ, 2, "")
				t.pos++
				t.column++
			} else if t.peek() == '~' {
				t.addToken(MATCHES, 2, "")
				t.pos++
				t.column++
			} else {
				t.addToken(ILLEGAL, 1, "invalid token")
			}
		case ch == '>':
			if t.peek() == '=' {
				t.addToken(GE, 2, "")
				t.pos++
				t.column++
			} else {
				t.addToken(GT, 1, "")
			}
		case ch == '<':
			if t.peek() == '=' {
				t.addToken(LE, 2, "")
				t.pos++
				t.column++
			} else {
				t.addToken(LT, 1, "")
			}
		case ch == '"' || ch == '\'':
			t.scanString(rune(ch))
		case ch == '-' && isDigit(t.peek()):
			// a leading minus only starts a number when a digit follows
			fallthrough
		case isDigit(ch):
			t.scanNumber()
		case isLiteralChar(ch):
			t.scanLiteral()
		default:
			t.addToken(ILLEGAL, 1, string(ch))
		}
		t.pos++
		t.column++
	}
	// anything left on the stack is an unmatched "(" or "["
	if len(t.stack) > 0 {
		t.addToken(ILLEGAL, 1, fmt.Sprintf("unmatched %s", t.stack[len(t.stack)-1].String()))
	}
	return t.tokens
}
// addToken appends a token at the current line/column. length is the
// token's width in the input, used for caret rendering in error output.
// Emitting any token clears the illegal-whitespace flag set by "."/"..".
// (Parameters renamed: "len" shadowed the builtin len.)
func (t *Tokenizer) addToken(tok Token, length int, literal string) {
	t.tokens = append(t.tokens, TokenInfo{
		Token:   tok,
		Line:    t.line,
		Column:  t.column,
		Len:     length,
		Literal: literal,
	})
	t.illegalWhitespace = false
}
// scanString scans a quoted string starting just after the opening
// quote, un-escaping supported escape sequences into the token literal.
// An unterminated string or an unsupported escape emits ILLEGAL.
// Fixes: the 'r' escape previously wrote '\n' instead of '\r'; leftover
// debug statements (b := literal.String(); _ = b) were removed.
func (t *Tokenizer) scanString(quote rune) {
	start := t.pos + 1
	var literal strings.Builder
illegal:
	for i := start; i < len(t.input); i++ {
		if t.input[i] == byte(quote) {
			// closing quote: token length includes both quote characters
			t.addToken(STRING_LITERAL, len(t.input[start:i])+2, literal.String())
			t.pos = i
			t.column += i - start + 1
			return
		}
		if t.input[i] == '\\' {
			i++
			if i >= len(t.input) {
				// dangling backslash at end of input
				t.addToken(ILLEGAL, len(t.input[start:]), literal.String())
				t.pos = len(t.input) - 1
				t.column = len(t.input) - 1
				return
			}
			switch t.input[i] {
			case 'b':
				literal.WriteByte('\b')
			case 'f':
				literal.WriteByte('\f')
			case 'n':
				literal.WriteByte('\n')
			case 'r':
				literal.WriteByte('\r')
			case 't':
				literal.WriteByte('\t')
			case '\'':
				if quote != '\'' {
					// \' is only a valid escape inside a single-quoted string
					break illegal
				}
				literal.WriteByte(t.input[i])
			case '"':
				if quote != '"' {
					// \" is only a valid escape inside a double-quoted string
					break illegal
				}
				literal.WriteByte(t.input[i])
			case '\\', '/':
				literal.WriteByte(t.input[i])
			default:
				break illegal
			}
		} else {
			literal.WriteByte(t.input[i])
		}
	}
	// unterminated string or invalid escape sequence
	t.addToken(ILLEGAL, len(t.input[start:]), literal.String())
	t.pos = len(t.input) - 1
	t.column = len(t.input) - 1
}
// scanNumber scans an integer or float starting at t.pos and validates
// it against the JSONPath number grammar (no leading zeros, no trailing
// dot, no bare exponent). Invalid numbers are emitted as ILLEGAL tokens.
func (t *Tokenizer) scanNumber() {
	start := t.pos
	tokenType := INTEGER
	dotSeen := false
	exponentSeen := false
	for i := start; i < len(t.input); i++ {
		// a leading '-' sign is only allowed at the first position
		if i == start && t.input[i] == '-' {
			continue
		}
		if t.input[i] == '.' {
			// only one dot, and never after an exponent
			if dotSeen || exponentSeen {
				t.addToken(ILLEGAL, len(t.input[start:i]), t.input[start:i])
				t.pos = i
				t.column += i - start
				return
			}
			tokenType = FLOAT
			dotSeen = true
			continue
		}
		if t.input[i] == 'e' || t.input[i] == 'E' {
			// only one exponent, and not immediately after a dot.
			// NOTE(review): the len(t.input) > 0 guard looks intended as
			// i > start — confirm; i-1 cannot underflow here in practice
			// since scanNumber is only entered on a digit or '-'.
			if exponentSeen || (len(t.input) > 0 && t.input[i-1] == '.') {
				t.addToken(ILLEGAL, len(t.input[start:i]), t.input[start:i])
				t.pos = i
				t.column += i - start
				return
			}
			tokenType = FLOAT
			exponentSeen = true
			// consume an optional exponent sign
			if i+1 < len(t.input) && (t.input[i+1] == '+' || t.input[i+1] == '-') {
				i++
			}
			continue
		}
		if !isDigit(t.input[i]) {
			// delimiter reached: validate and emit the number
			literal := t.input[start:i]
			// check for legal numbers
			_, err := strconv.ParseFloat(literal, 64)
			if err != nil {
				tokenType = ILLEGAL
			}
			// conformance spec
			if len(literal) > 1 && literal[0] == '0' && !dotSeen {
				// no leading zero
				tokenType = ILLEGAL
			} else if len(literal) > 2 && literal[0] == '-' && literal[1] == '0' && !dotSeen {
				// no leading zero after a minus sign ("-0" itself is allowed)
				tokenType = ILLEGAL
			} else if len(literal) > 0 && literal[len(literal)-1] == '.' {
				// no trailing dot
				tokenType = ILLEGAL
			} else if literal[len(literal)-1] == 'e' || literal[len(literal)-1] == 'E' {
				// no bare trailing exponent
				tokenType = ILLEGAL
			}
			t.addToken(tokenType, len(literal), literal)
			t.pos = i - 1
			t.column += i - start - 1
			return
		}
	}
	// number runs to the end of the input
	if exponentSeen && !isDigit(t.input[len(t.input)-1]) {
		t.addToken(ILLEGAL, len(t.input[start:]), t.input[start:])
		t.pos = len(t.input) - 1
		t.column = len(t.input) - 1
		return
	}
	literal := t.input[start:]
	t.addToken(tokenType, len(literal), literal)
	t.pos = len(t.input) - 1
	t.column = len(t.input) - 1
}
// scanLiteral scans a bare word starting at t.pos and classifies it as
// TRUE/FALSE/NULL, a FUNCTION name, or a generic STRING (member-name
// shorthand).
// NOTE(review): a word ending exactly at end-of-input is never classified
// as FUNCTION (bottom branch) — harmless in practice since a function
// must be followed by "(", but the two classification paths differ.
func (t *Tokenizer) scanLiteral() {
	start := t.pos
	for i := start; i < len(t.input); i++ {
		if !isLiteralChar(t.input[i]) && !isDigit(t.input[i]) {
			// delimiter reached: classify the word
			literal := t.input[start:i]
			switch literal {
			case "true":
				t.addToken(TRUE, len(literal), literal)
			case "false":
				t.addToken(FALSE, len(literal), literal)
			case "null":
				t.addToken(NULL, len(literal), literal)
			default:
				if isFunctionName(literal) {
					t.addToken(FUNCTION, len(literal), literal)
					// whitespace between a function name and "(" is illegal
					t.illegalWhitespace = true
				} else {
					t.addToken(STRING, len(literal), literal)
				}
			}
			t.pos = i - 1
			t.column += i - start - 1
			return
		}
	}
	// word runs to the end of the input
	literal := t.input[start:]
	switch literal {
	case "true":
		t.addToken(TRUE, len(literal), literal)
	case "false":
		t.addToken(FALSE, len(literal), literal)
	case "null":
		t.addToken(NULL, len(literal), literal)
	default:
		t.addToken(STRING, len(literal), literal)
	}
	t.pos = len(t.input) - 1
	t.column = len(t.input) - 1
}
// isFunctionName reports whether literal is one of the built-in JSONPath
// function-extension names.
func isFunctionName(literal string) bool {
	switch literal {
	case "length", "count", "match", "search", "value":
		return true
	}
	return false
}
// skipWhitespace advances past blank space (space, tab, CR) and
// newlines, maintaining the line/column counters.
// NOTE(review): the len(t.tokens) > 0 guard means whitespace before the
// first token is never skipped, and t.pos+1 < len(t.input) stops one
// byte short of the end — confirm both are intentional.
func (t *Tokenizer) skipWhitespace() {
	// S = *B ; optional blank space
	// B = %x20 / ; Space
	// %x09 / ; Horizontal tab
	// %x0A / ; Line feed or New line
	// %x0D ; Carriage return
	for len(t.tokens) > 0 && t.pos+1 < len(t.input) {
		ch := t.input[t.pos]
		if ch == '\n' {
			// newline: bump the line counter and reset the column
			t.line++
			t.pos++
			t.column = 0
		} else if !isSpace(ch) {
			break
		} else {
			t.pos++
			t.column++
		}
	}
}
// peek returns the next input byte without consuming it, or 0 at the
// end of the input.
func (t *Tokenizer) peek() byte {
	next := t.pos + 1
	if next >= len(t.input) {
		return 0
	}
	return t.input[next]
}
// isDigit reports whether ch is an ASCII decimal digit.
func isDigit(ch byte) bool {
	switch {
	case ch < '0', ch > '9':
		return false
	}
	return true
}
// isLiteralChar reports whether ch can appear in a bare literal: ASCII
// letters, underscore, or any byte >= 0x80 (permitting UTF-8 sequences).
func isLiteralChar(ch byte) bool {
	if ch >= 0x80 || ch == '_' {
		return true
	}
	return ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z')
}
// isSpace reports whether ch is a blank character other than newline
// (newlines are handled separately so line counting stays accurate).
func isSpace(ch byte) bool {
	switch ch {
	case ' ', '\t', '\r':
		return true
	}
	return false
}

View file

@ -0,0 +1,278 @@
package jsonpath
import (
"fmt"
"gopkg.in/yaml.v3"
"reflect"
"regexp"
"strconv"
"unicode/utf8"
)
// Equals implements the "==" comparison for literals. Numeric values
// compare across int/float; strings, bools and nulls compare by value;
// YAML nodes compare deeply via equalsNode. Two zero-valued literals
// (Nothing == Nothing) compare equal; any other type mismatch is false.
func (l literal) Equals(value literal) bool {
	if l.integer != nil && value.integer != nil {
		return *l.integer == *value.integer
	}
	if l.float64 != nil && value.float64 != nil {
		return *l.float64 == *value.float64
	}
	// mixed int/float comparisons are performed in float64
	if l.integer != nil && value.float64 != nil {
		return float64(*l.integer) == *value.float64
	}
	if l.float64 != nil && value.integer != nil {
		return *l.float64 == float64(*value.integer)
	}
	if l.string != nil && value.string != nil {
		return *l.string == *value.string
	}
	if l.bool != nil && value.bool != nil {
		return *l.bool == *value.bool
	}
	if l.null != nil && value.null != nil {
		return *l.null == *value.null
	}
	if l.node != nil && value.node != nil {
		return equalsNode(l.node, value.node)
	}
	// both literals empty: Nothing equals Nothing
	if reflect.ValueOf(l).IsZero() && reflect.ValueOf(value).IsZero() {
		return true
	}
	return false
}
// equalsNode deeply compares two YAML nodes. Tags must match; scalars
// compare by raw value, sequences and mappings element-wise. Nodes with
// a matching tag outside the handled set compare as equal.
func equalsNode(a *yaml.Node, b *yaml.Node) bool {
	if a.Tag != b.Tag {
		return false
	}
	switch a.Tag {
	case "!!str", "!!int", "!!float", "!!bool", "!!null":
		// all scalar kinds compare by their raw string value
		return a.Value == b.Value
	case "!!seq":
		if len(a.Content) != len(b.Content) {
			return false
		}
		for i := range a.Content {
			if !equalsNode(a.Content[i], b.Content[i]) {
				return false
			}
		}
	case "!!map":
		if len(a.Content) != len(b.Content) {
			return false
		}
		// mapping content alternates key, value
		for i := 0; i < len(a.Content); i += 2 {
			if !equalsNode(a.Content[i], b.Content[i]) {
				return false
			}
			if !equalsNode(a.Content[i+1], b.Content[i+1]) {
				return false
			}
		}
	}
	return true
}
// LessThan implements the "<" comparison for literals. Numbers compare
// numerically across int/float and strings lexicographically; every
// other combination (bool, null, nodes, mixed types) is unordered and
// yields false.
func (l literal) LessThan(value literal) bool {
	switch {
	case l.integer != nil && value.integer != nil:
		return *l.integer < *value.integer
	case l.float64 != nil && value.float64 != nil:
		return *l.float64 < *value.float64
	case l.integer != nil && value.float64 != nil:
		return float64(*l.integer) < *value.float64
	case l.float64 != nil && value.integer != nil:
		return *l.float64 < float64(*value.integer)
	case l.string != nil && value.string != nil:
		return *l.string < *value.string
	}
	return false
}
// LessThanOrEqual implements "<=" as the disjunction of Equals and
// LessThan (both are pure, so evaluation order is immaterial).
func (l literal) LessThanOrEqual(value literal) bool {
	if l.Equals(value) {
		return true
	}
	return l.LessThan(value)
}
// Evaluate resolves this comparable to a literal: an inline literal is
// returned as-is, otherwise the embedded singular query or function
// expression is evaluated. An empty comparable yields the zero literal.
func (c comparable) Evaluate(idx index, node *yaml.Node, root *yaml.Node) literal {
	switch {
	case c.literal != nil:
		return *c.literal
	case c.singularQuery != nil:
		return c.singularQuery.Evaluate(idx, node, root)
	case c.functionExpr != nil:
		return c.functionExpr.Evaluate(idx, node, root)
	default:
		return literal{}
	}
}
// length implements the length() function extension (RFC 9535 §2.4.4):
//   - for a string argument, the number of Unicode scalar values;
//   - for an array, the number of elements;
//   - for an object, the number of members;
//   - for anything else, the special result Nothing (the zero literal).
func (e functionExpr) length(idx index, node *yaml.Node, root *yaml.Node) literal {
	args := e.args[0].Eval(idx, node, root)
	// Guard args.literal here once: the original checked it only for the
	// string case and then dereferenced args.literal.node unconditionally,
	// which could panic on a literal-kind result with a nil literal.
	if args.kind != functionArgTypeLiteral || args.literal == nil {
		return literal{}
	}
	// String argument: count runes, not bytes.
	if args.literal.string != nil {
		res := utf8.RuneCountInString(*args.literal.string)
		return literal{integer: &res}
	}
	if args.literal.node != nil {
		switch args.literal.node.Kind {
		case yaml.SequenceNode:
			// Array: number of elements.
			res := len(args.literal.node.Content)
			return literal{integer: &res}
		case yaml.MappingNode:
			// Object: Content alternates keys and values, so halve it.
			res := len(args.literal.node.Content) / 2
			return literal{integer: &res}
		}
	}
	// Any other argument value: Nothing.
	return literal{}
}
// count implements the count() function extension: the number of nodes
// produced by the argument, or 1 for any non-nodelist result.
func (e functionExpr) count(idx index, node *yaml.Node, root *yaml.Node) literal {
	res := 1
	if args := e.args[0].Eval(idx, node, root); args.kind == functionArgTypeNodes {
		res = len(args.nodes)
	}
	return literal{integer: &res}
}
// match implements the match() function extension: a full-string regular
// expression match of the first argument against the pattern in the
// second. Non-literal arguments yield Nothing; nil or non-string
// literals (or an invalid pattern) yield false.
func (e functionExpr) match(idx index, node *yaml.Node, root *yaml.Node) literal {
	arg1 := e.args[0].Eval(idx, node, root)
	arg2 := e.args[1].Eval(idx, node, root)
	if arg1.kind != functionArgTypeLiteral || arg2.kind != functionArgTypeLiteral {
		return literal{}
	}
	// Guard the literal pointers before touching their fields: the
	// original dereferenced arg1.literal.string directly and could panic
	// on a literal-kind result whose literal pointer is nil.
	if arg1.literal == nil || arg2.literal == nil ||
		arg1.literal.string == nil || arg2.literal.string == nil {
		f := false
		return literal{bool: &f}
	}
	// Anchor the pattern so the whole input must match; an invalid
	// pattern is deliberately treated as a non-match.
	matched, _ := regexp.MatchString(fmt.Sprintf("^(%s)$", *arg2.literal.string), *arg1.literal.string)
	return literal{bool: &matched}
}
// search implements the search() function extension: an unanchored
// regular expression match of the first argument against the pattern in
// the second. Non-literal arguments yield Nothing; nil or non-string
// literals (or an invalid pattern) yield false.
func (e functionExpr) search(idx index, node *yaml.Node, root *yaml.Node) literal {
	arg1 := e.args[0].Eval(idx, node, root)
	arg2 := e.args[1].Eval(idx, node, root)
	if arg1.kind != functionArgTypeLiteral || arg2.kind != functionArgTypeLiteral {
		return literal{}
	}
	// Guard the literal pointers before touching their fields: the
	// original dereferenced arg1.literal.string directly and could panic
	// on a literal-kind result whose literal pointer is nil.
	if arg1.literal == nil || arg2.literal == nil ||
		arg1.literal.string == nil || arg2.literal.string == nil {
		f := false
		return literal{bool: &f}
	}
	// Unanchored, unlike match(); an invalid pattern is a non-match.
	matched, _ := regexp.MatchString(*arg2.literal.string, *arg1.literal.string)
	return literal{bool: &matched}
}
// value implements the value() function extension (RFC 9535 §2.4.8): a
// literal argument passes through unchanged; a nodelist containing
// exactly one node yields that node's value; an empty or multi-node
// list yields Nothing.
func (e functionExpr) value(idx index, node *yaml.Node, root *yaml.Node) literal {
	arg := e.args[0].Eval(idx, node, root)
	switch {
	case arg.kind == functionArgTypeLiteral:
		return *arg.literal
	case arg.kind == functionArgTypeNodes && len(arg.nodes) == 1:
		return *arg.nodes[0]
	}
	return literal{}
}
// nodeToLiteral converts a scalar YAML node into the matching literal
// kind; any non-scalar or unrecognized tag is wrapped as a node literal.
// Scalar parse failures silently fall back to the type's zero value.
func nodeToLiteral(node *yaml.Node) literal {
	switch node.Tag {
	case "!!str":
		return literal{string: &node.Value}
	case "!!int":
		// Parse failure leaves i at 0 (error deliberately ignored).
		i, _ := strconv.Atoi(node.Value)
		return literal{integer: &i}
	case "!!float":
		f, _ := strconv.ParseFloat(node.Value, 64)
		return literal{float64: &f}
	case "!!bool":
		b, _ := strconv.ParseBool(node.Value)
		return literal{bool: &b}
	case "!!null":
		isNull := true
		return literal{null: &isNull}
	}
	return literal{node: node}
}
// Evaluate dispatches to the implementation of this expression's
// function extension; an unknown function type yields Nothing.
func (e functionExpr) Evaluate(idx index, node *yaml.Node, root *yaml.Node) literal {
	var impl func(index, *yaml.Node, *yaml.Node) literal
	switch e.funcType {
	case functionTypeLength:
		impl = e.length
	case functionTypeCount:
		impl = e.count
	case functionTypeMatch:
		impl = e.match
	case functionTypeSearch:
		impl = e.search
	case functionTypeValue:
		impl = e.value
	default:
		return literal{}
	}
	return impl(idx, node, root)
}
// Evaluate resolves the singular query via whichever form is present:
// the relative query (from the current node) or the absolute query
// (from the document root). An empty query yields Nothing.
func (q singularQuery) Evaluate(idx index, node *yaml.Node, root *yaml.Node) literal {
	switch {
	case q.relQuery != nil:
		return q.relQuery.Evaluate(idx, node, root)
	case q.absQuery != nil:
		return q.absQuery.Evaluate(idx, node, root)
	default:
		return literal{}
	}
}
// Evaluate runs the relative query from the current node; exactly one
// match converts to a literal, anything else is Nothing.
func (q relQuery) Evaluate(idx index, node *yaml.Node, root *yaml.Node) literal {
	matches := q.Query(idx, node, root)
	if len(matches) != 1 {
		return literal{}
	}
	return nodeToLiteral(matches[0])
}
// Evaluate runs the absolute query from the document root (the current
// node is ignored); exactly one match converts to a literal, anything
// else is Nothing.
func (q absQuery) Evaluate(idx index, node *yaml.Node, root *yaml.Node) literal {
	matches := q.Query(idx, root, root)
	if len(matches) != 1 {
		return literal{}
	}
	return nodeToLiteral(matches[0])
}

View file

@ -0,0 +1,393 @@
package jsonpath
import (
"gopkg.in/yaml.v3"
)
// Evaluator is implemented by AST types that can execute a JSONPath
// query against a YAML document, returning the matched nodes.
type Evaluator interface {
	// Query evaluates against current with root as the top of the
	// document, returning every matching node.
	Query(current *yaml.Node, root *yaml.Node) []*yaml.Node
}
// index records parent/key relationships discovered while walking a
// document, so later segments can map a node back to the key or
// container it came from.
type index interface {
	// setPropertyKey records value as the entry associated with key.
	setPropertyKey(key *yaml.Node, value *yaml.Node)
	// getPropertyKey returns the entry recorded for key, or nil if none.
	getPropertyKey(key *yaml.Node) *yaml.Node
}
// _index is the default index implementation, backed by a plain map
// keyed on node identity (pointer equality).
type _index struct {
	// propertyKeys maps a node pointer to its recorded related node.
	propertyKeys map[*yaml.Node]*yaml.Node
}
// setPropertyKey records value for key. A nil receiver or an
// uninitialized map makes the call a no-op.
func (i *_index) setPropertyKey(key *yaml.Node, value *yaml.Node) {
	if i == nil || i.propertyKeys == nil {
		return
	}
	i.propertyKeys[key] = value
}
// getPropertyKey returns the node recorded for key. A nil receiver or a
// missing entry yields nil (reading a nil map is safe in Go).
func (i *_index) getPropertyKey(key *yaml.Node) *yaml.Node {
	if i == nil {
		return nil
	}
	return i.propertyKeys[key]
}
// Compile-time check that jsonPathAST satisfies Evaluator.
var _ Evaluator = jsonPathAST{}
// Query executes the parsed JSONPath against root, feeding each
// segment's matches into the next segment. Evaluation always begins at
// the document root; the current argument is not used at the top level.
func (q jsonPathAST) Query(current *yaml.Node, root *yaml.Node) []*yaml.Node {
	idx := _index{propertyKeys: map[*yaml.Node]*yaml.Node{}}
	// A document node wrapping a single child is transparent: start the
	// walk at the child instead.
	if root.Kind == yaml.DocumentNode && len(root.Content) == 1 {
		root = root.Content[0]
	}
	matches := []*yaml.Node{root}
	for _, segment := range q.segments {
		next := []*yaml.Node{}
		for _, m := range matches {
			next = append(next, segment.Query(&idx, m, root)...)
		}
		matches = next
	}
	return matches
}
// Query evaluates one path segment against value. Child segments
// delegate directly; descendant segments apply the inner segment to
// every node reachable from value and deduplicate the result; the
// property-name segment looks the node's recorded key up in the index.
func (s segment) Query(idx index, value *yaml.Node, root *yaml.Node) []*yaml.Node {
	switch s.kind {
	case segmentKindChild:
		return s.child.Query(idx, value, root)
	case segmentKindDescendant:
		result := []*yaml.Node{}
		for _, child := range descend(value, root) {
			result = append(result, s.descendant.Query(idx, child, root)...)
		}
		// Deduplicate by pointer identity while preserving order.
		return unique(result)
	case segmentKindProperyName:
		if found := idx.getPropertyKey(value); found != nil {
			return []*yaml.Node{found}
		}
		return []*yaml.Node{}
	}
	panic("no segment type")
}
// unique stably filters nodes down to the first occurrence of each
// pointer, preserving the original order.
func unique(nodes []*yaml.Node) []*yaml.Node {
	seen := make(map[*yaml.Node]bool, len(nodes))
	out := make([]*yaml.Node, 0)
	for _, n := range nodes {
		if seen[n] {
			continue
		}
		seen[n] = true
		out = append(out, n)
	}
	return out
}
// Query evaluates a child segment — dot wildcard, dotted member name,
// or bracketed selector list — against value, recording parent/key
// relationships in the index as it goes.
func (s innerSegment) Query(idx index, value *yaml.Node, root *yaml.Node) []*yaml.Node {
	out := []*yaml.Node{}
	switch s.kind {
	case segmentDotWildcard:
		switch value.Kind {
		case yaml.MappingNode:
			// Content alternates key, value: collect every value while
			// recording each key's parent and each value's key.
			for i := 1; i < len(value.Content); i += 2 {
				key, val := value.Content[i-1], value.Content[i]
				idx.setPropertyKey(key, value)
				idx.setPropertyKey(val, key)
				out = append(out, val)
			}
		case yaml.SequenceNode:
			out = append(out, value.Content...)
		}
		return out
	case segmentDotMemberName:
		if value.Kind == yaml.MappingNode {
			// Return the value of the first key matching the member name.
			for i := 1; i < len(value.Content); i += 2 {
				key, val := value.Content[i-1], value.Content[i]
				if key.Value == s.dotName {
					idx.setPropertyKey(key, value)
					idx.setPropertyKey(val, key)
					out = append(out, val)
					break
				}
			}
		}
	case segmentLongHand:
		// Union of every bracketed selector's matches, in order.
		for _, sel := range s.selectors {
			out = append(out, sel.Query(idx, value, root)...)
		}
	default:
		panic("unknown child segment kind")
	}
	return out
}
// Query applies a single bracket selector (name, array index, wildcard,
// array slice, or filter) to value, returning the matched child nodes.
// A selector applied to an incompatible node kind yields nil. Parent/key
// relationships are recorded in idx for mapping-derived matches.
func (s selector) Query(idx index, value *yaml.Node, root *yaml.Node) []*yaml.Node {
	switch s.kind {
	case selectorSubKindName:
		if value.Kind != yaml.MappingNode {
			return nil
		}
		// MappingNode children is a list of alternating keys and values
		var key string
		for i, child := range value.Content {
			if i%2 == 0 {
				// Even index: remember the key for the value that follows.
				key = child.Value
				continue
			}
			if key == s.name && i%2 == 1 {
				// First matching key wins; record value→key and key→parent.
				idx.setPropertyKey(value.Content[i], value.Content[i-1])
				idx.setPropertyKey(value.Content[i-1], value)
				return []*yaml.Node{child}
			}
		}
	case selectorSubKindArrayIndex:
		if value.Kind != yaml.SequenceNode {
			return nil
		}
		// if out of bounds, return nothing
		if s.index >= int64(len(value.Content)) || s.index < -int64(len(value.Content)) {
			return nil
		}
		// if index is negative, go backwards
		if s.index < 0 {
			return []*yaml.Node{value.Content[int64(len(value.Content))+s.index]}
		}
		return []*yaml.Node{value.Content[s.index]}
	case selectorSubKindWildcard:
		if value.Kind == yaml.SequenceNode {
			return value.Content
		} else if value.Kind == yaml.MappingNode {
			var result []*yaml.Node
			// Collect the values (odd indices) of the mapping, recording
			// each key's parent and each value's key.
			for i, child := range value.Content {
				if i%2 == 1 {
					idx.setPropertyKey(value.Content[i-1], value)
					idx.setPropertyKey(child, value.Content[i-1])
					result = append(result, child)
				}
			}
			return result
		}
		return nil
	case selectorSubKindArraySlice:
		if value.Kind != yaml.SequenceNode {
			return nil
		}
		if len(value.Content) == 0 {
			return nil
		}
		// Step defaults to 1; a zero step selects nothing (RFC 9535).
		step := int64(1)
		if s.slice.step != nil {
			step = *s.slice.step
		}
		if step == 0 {
			return nil
		}
		start, end := s.slice.start, s.slice.end
		// bounds resolves defaults/negative indices and clamps to length.
		lower, upper := bounds(start, end, step, int64(len(value.Content)))
		var result []*yaml.Node
		if step > 0 {
			// Forward slice: [lower, upper) ascending.
			for i := lower; i < upper; i += step {
				result = append(result, value.Content[i])
			}
		} else {
			// Reverse slice: (lower, upper] descending (step is negative).
			for i := upper; i > lower; i += step {
				result = append(result, value.Content[i])
			}
		}
		return result
	case selectorSubKindFilter:
		var result []*yaml.Node
		switch value.Kind {
		case yaml.MappingNode:
			// Filter the mapping's values (odd indices), recording
			// parent/key bookkeeping for each pair before testing.
			for i := 1; i < len(value.Content); i += 2 {
				idx.setPropertyKey(value.Content[i-1], value)
				idx.setPropertyKey(value.Content[i], value.Content[i-1])
				if s.filter.Matches(idx, value.Content[i], root) {
					result = append(result, value.Content[i])
				}
			}
		case yaml.SequenceNode:
			for _, child := range value.Content {
				if s.filter.Matches(idx, child, root) {
					result = append(result, child)
				}
			}
		}
		return result
	}
	return nil
}
// normalize maps a possibly-negative slice index i for a sequence of
// the given length: negative indices count back from the end,
// non-negative indices pass through unchanged.
func normalize(i, length int64) int64 {
	if i < 0 {
		return length + i
	}
	return i
}
// bounds converts raw slice parameters into a concrete window clamped
// to the sequence length, following the RFC 9535 array-slice rules. A
// nil start or end takes the default for the step direction; negative
// indices count back from the end. For a positive step the window is
// [lower, upper) ascending; for a negative step it is (lower, upper]
// descending.
func bounds(start, end *int64, step, length int64) (int64, int64) {
	// resolve returns the explicit index (negatives count from the end)
	// or the default for this step direction.
	resolve := func(p *int64, fwdDefault, revDefault int64) int64 {
		if p != nil {
			if *p < 0 {
				return length + *p
			}
			return *p
		}
		if step > 0 {
			return fwdDefault
		}
		return revDefault
	}
	nStart := resolve(start, 0, length-1)
	nEnd := resolve(end, length, -1)
	if step >= 0 {
		lower := max(min(nStart, length), 0)
		upper := min(max(nEnd, 0), length)
		return lower, upper
	}
	upper := min(max(nStart, -1), length-1)
	lower := min(max(nEnd, -1), length-1)
	return lower, upper
}
// Matches reports whether node satisfies the filter's boolean expression.
func (s filterSelector) Matches(idx index, node *yaml.Node, root *yaml.Node) bool {
	return s.expression.Matches(idx, node, root)
}
// Matches reports whether any disjunct accepts node — a short-circuiting
// logical OR. An empty expression list yields false.
func (e logicalOrExpr) Matches(idx index, node *yaml.Node, root *yaml.Node) bool {
	for _, term := range e.expressions {
		if term.Matches(idx, node, root) {
			return true
		}
	}
	return false
}
// Matches reports whether every conjunct accepts node — a
// short-circuiting logical AND. An empty expression list yields true.
func (e logicalAndExpr) Matches(idx index, node *yaml.Node, root *yaml.Node) bool {
	for _, term := range e.expressions {
		if !term.Matches(idx, node, root) {
			return false
		}
	}
	return true
}
// Matches evaluates whichever sub-expression this basic expression
// holds: a (possibly negated) parenthesized expression, a comparison,
// or an existence test. An empty expression yields false.
func (e basicExpr) Matches(idx index, node *yaml.Node, root *yaml.Node) bool {
	switch {
	case e.parenExpr != nil:
		matched := e.parenExpr.expr.Matches(idx, node, root)
		if e.parenExpr.not {
			matched = !matched
		}
		return matched
	case e.comparisonExpr != nil:
		return e.comparisonExpr.Matches(idx, node, root)
	case e.testExpr != nil:
		return e.testExpr.Matches(idx, node, root)
	}
	return false
}
// Matches evaluates both operands to literals and applies the
// comparison operator. The greater-than forms are expressed through the
// flipped less-than, so literal only needs one ordering primitive.
func (e comparisonExpr) Matches(idx index, node *yaml.Node, root *yaml.Node) bool {
	lhs := e.left.Evaluate(idx, node, root)
	rhs := e.right.Evaluate(idx, node, root)
	switch e.op {
	case equalTo:
		return lhs.Equals(rhs)
	case notEqualTo:
		return !lhs.Equals(rhs)
	case lessThan:
		return lhs.LessThan(rhs)
	case lessThanEqualTo:
		return lhs.LessThanOrEqual(rhs)
	case greaterThan:
		return rhs.LessThan(lhs)
	case greaterThanEqualTo:
		return rhs.LessThanOrEqual(lhs)
	}
	return false
}
// Matches implements an existence test: a filter query matches when it
// yields at least one node; a function expression matches when it
// returns true, or — for non-boolean results — when the result's null
// field is unset. The outcome is inverted when the test is negated.
func (e testExpr) Matches(idx index, node *yaml.Node, root *yaml.Node) bool {
	var result bool
	if e.filterQuery != nil {
		result = len(e.filterQuery.Query(idx, node, root)) > 0
	} else if e.functionExpr != nil {
		funcResult := e.functionExpr.Evaluate(idx, node, root)
		if funcResult.bool != nil {
			result = *funcResult.bool
		} else if funcResult.null == nil {
			// NOTE(review): a zero literal ("Nothing") also has null ==
			// nil and therefore tests true here — confirm that treating
			// an empty function result as a match is intended.
			result = true
		}
	}
	if e.not {
		return !result
	}
	return result
}
// Query runs whichever embedded query is present: the relative query
// from the current node, or a full JSONPath from the document root. An
// empty filter query yields nil.
func (q filterQuery) Query(idx index, node *yaml.Node, root *yaml.Node) []*yaml.Node {
	switch {
	case q.relQuery != nil:
		return q.relQuery.Query(idx, node, root)
	case q.jsonPathQuery != nil:
		return q.jsonPathQuery.Query(node, root)
	}
	return nil
}
// Query walks the query's segments starting from the current node,
// feeding each segment's matches into the next segment.
func (q relQuery) Query(idx index, node *yaml.Node, root *yaml.Node) []*yaml.Node {
	matches := []*yaml.Node{node}
	for _, seg := range q.segments {
		var next []*yaml.Node
		for _, m := range matches {
			next = append(next, seg.Query(idx, m, root)...)
		}
		matches = next
	}
	return matches
}
// Query walks the query's segments starting from the document root (the
// node argument is ignored), feeding each segment's matches into the
// next segment.
func (q absQuery) Query(idx index, node *yaml.Node, root *yaml.Node) []*yaml.Node {
	matches := []*yaml.Node{root}
	for _, seg := range q.segments {
		var next []*yaml.Node
		for _, m := range matches {
			next = append(next, seg.Query(idx, m, root)...)
		}
		matches = next
	}
	return matches
}