18 changes: 9 additions & 9 deletions docs/usage/parser.rst
@@ -35,30 +35,30 @@ This will give the same result as manually creating the AST document::

from graphql.language.ast import *

document = DocumentNode(definitions=[
document = DocumentNode(definitions=(
ObjectTypeDefinitionNode(
name=NameNode(value='Query'),
fields=[
fields=(
FieldDefinitionNode(
name=NameNode(value='me'),
type=NamedTypeNode(name=NameNode(value='User')),
arguments=[], directives=[])
], directives=[], interfaces=[]),
arguments=(), directives=()),
), interfaces=(), directives=()),
ObjectTypeDefinitionNode(
name=NameNode(value='User'),
fields=[
fields=(
FieldDefinitionNode(
name=NameNode(value='id'),
type=NamedTypeNode(
name=NameNode(value='ID')),
arguments=[], directives=[]),
arguments=(), directives=()),
FieldDefinitionNode(
name=NameNode(value='name'),
type=NamedTypeNode(
name=NameNode(value='String')),
arguments=[], directives=[]),
], directives=[], interfaces=[]),
])
arguments=(), directives=()),
), interfaces=(), directives=()),
))


When parsing with ``no_location=False`` (the default), the AST nodes will also have a
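
A quick way to see the effect of this change from the outside, assuming the top-level ``parse`` helper (a minimal sketch, not part of this diff)::

    from graphql import parse

    doc = parse("""
    type Query {
      me: User
    }
    """)
    # On this branch, child collections are tuples rather than lists:
    assert isinstance(doc.definitions, tuple)
    assert isinstance(doc.definitions[0].fields, tuple)
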
73 changes: 37 additions & 36 deletions src/graphql/language/parser.py
@@ -3,7 +3,7 @@
from __future__ import annotations

from functools import partial
from typing import Callable, List, Mapping, TypeVar, Union, cast
from typing import Callable, Mapping, TypeVar, Union, cast

from ..error import GraphQLError, GraphQLSyntaxError
from .ast import (
@@ -349,8 +349,8 @@ def parse_operation_definition(self) -> OperationDefinitionNode:
return OperationDefinitionNode(
operation=OperationType.QUERY,
name=None,
variable_definitions=[],
directives=[],
variable_definitions=(),
directives=(),
selection_set=self.parse_selection_set(),
loc=self.loc(start),
)
@@ -373,7 +373,7 @@ def parse_operation_type(self) -> OperationType:
except ValueError as error:
raise self.unexpected(operation_token) from error

def parse_variable_definitions(self) -> list[VariableDefinitionNode]:
def parse_variable_definitions(self) -> tuple[VariableDefinitionNode, ...]:
"""VariableDefinitions: (VariableDefinition+)"""
return self.optional_many(
TokenKind.PAREN_L, self.parse_variable_definition, TokenKind.PAREN_R
@@ -468,7 +468,7 @@ def parse_nullability_assertion(self) -> NullabilityAssertionNode | None:

return nullability_assertion

def parse_arguments(self, is_const: bool) -> list[ArgumentNode]:
def parse_arguments(self, is_const: bool) -> tuple[ArgumentNode, ...]:
"""Arguments[Const]: (Argument[?Const]+)"""
item = self.parse_const_argument if is_const else self.parse_argument
return self.optional_many(
@@ -533,6 +533,7 @@ def parse_fragment_definition(self) -> FragmentDefinitionNode:
)
return FragmentDefinitionNode(
name=self.parse_fragment_name(),
variable_definitions=(),
type_condition=self.parse_type_condition(),
directives=self.parse_directives(False),
selection_set=self.parse_selection_set(),
@@ -646,16 +647,16 @@ def parse_const_value_literal(self) -> ConstValueNode:

# Implement the parsing rules in the Directives section.

def parse_directives(self, is_const: bool) -> list[DirectiveNode]:
def parse_directives(self, is_const: bool) -> tuple[DirectiveNode, ...]:
"""Directives[Const]: Directive[?Const]+"""
directives: list[DirectiveNode] = []
append = directives.append
while self.peek(TokenKind.AT):
append(self.parse_directive(is_const))
return directives
return tuple(directives)

def parse_const_directives(self) -> list[ConstDirectiveNode]:
return cast("List[ConstDirectiveNode]", self.parse_directives(True))
def parse_const_directives(self) -> tuple[ConstDirectiveNode, ...]:
return cast("tuple[ConstDirectiveNode, ...]", self.parse_directives(True))

def parse_directive(self, is_const: bool) -> DirectiveNode:
"""Directive[Const]: @ Name Arguments[?Const]?"""
@@ -778,15 +779,15 @@ def parse_object_type_definition(self) -> ObjectTypeDefinitionNode:
loc=self.loc(start),
)

def parse_implements_interfaces(self) -> list[NamedTypeNode]:
def parse_implements_interfaces(self) -> tuple[NamedTypeNode, ...]:
"""ImplementsInterfaces"""
return (
self.delimited_many(TokenKind.AMP, self.parse_named_type)
if self.expect_optional_keyword("implements")
else []
else ()
)

def parse_fields_definition(self) -> list[FieldDefinitionNode]:
def parse_fields_definition(self) -> tuple[FieldDefinitionNode, ...]:
"""FieldsDefinition: {FieldDefinition+}"""
return self.optional_many(
TokenKind.BRACE_L, self.parse_field_definition, TokenKind.BRACE_R
Expand All @@ -810,7 +811,7 @@ def parse_field_definition(self) -> FieldDefinitionNode:
loc=self.loc(start),
)

def parse_argument_defs(self) -> list[InputValueDefinitionNode]:
def parse_argument_defs(self) -> tuple[InputValueDefinitionNode, ...]:
"""ArgumentsDefinition: (InputValueDefinition+)"""
return self.optional_many(
TokenKind.PAREN_L, self.parse_input_value_def, TokenKind.PAREN_R
@@ -872,12 +873,12 @@ def parse_union_type_definition(self) -> UnionTypeDefinitionNode:
loc=self.loc(start),
)

def parse_union_member_types(self) -> list[NamedTypeNode]:
def parse_union_member_types(self) -> tuple[NamedTypeNode, ...]:
"""UnionMemberTypes"""
return (
self.delimited_many(TokenKind.PIPE, self.parse_named_type)
if self.expect_optional_token(TokenKind.EQUALS)
else []
else ()
)

def parse_enum_type_definition(self) -> EnumTypeDefinitionNode:
Expand All @@ -896,7 +897,7 @@ def parse_enum_type_definition(self) -> EnumTypeDefinitionNode:
loc=self.loc(start),
)

def parse_enum_values_definition(self) -> list[EnumValueDefinitionNode]:
def parse_enum_values_definition(self) -> tuple[EnumValueDefinitionNode, ...]:
"""EnumValuesDefinition: {EnumValueDefinition+}"""
return self.optional_many(
TokenKind.BRACE_L, self.parse_enum_value_definition, TokenKind.BRACE_R
@@ -942,7 +943,7 @@ def parse_input_object_type_definition(self) -> InputObjectTypeDefinitionNode:
loc=self.loc(start),
)

def parse_input_fields_definition(self) -> list[InputValueDefinitionNode]:
def parse_input_fields_definition(self) -> tuple[InputValueDefinitionNode, ...]:
"""InputFieldsDefinition: {InputValueDefinition+}"""
return self.optional_many(
TokenKind.BRACE_L, self.parse_input_value_def, TokenKind.BRACE_R
@@ -1076,7 +1077,7 @@ def parse_directive_definition(self) -> DirectiveDefinitionNode:
loc=self.loc(start),
)

def parse_directive_locations(self) -> list[NameNode]:
def parse_directive_locations(self) -> tuple[NameNode, ...]:
"""DirectiveLocations"""
return self.delimited_many(TokenKind.PIPE, self.parse_directive_location)

@@ -1173,11 +1174,11 @@ def unexpected(self, at_token: Token | None = None) -> GraphQLError:

def any(
self, open_kind: TokenKind, parse_fn: Callable[[], T], close_kind: TokenKind
) -> list[T]:
) -> tuple[T, ...]:
"""Fetch any matching nodes, possibly none.

Returns a possibly empty list of parse nodes, determined by the ``parse_fn``.
This list begins with a lex token of ``open_kind`` and ends with a lex token of
Returns a possibly empty tuple of parse nodes, determined by the ``parse_fn``.
This tuple begins with a lex token of ``open_kind`` and ends with a lex token of
``close_kind``. Advances the parser to the next lex token after the closing
token.
"""
Expand All @@ -1187,16 +1188,16 @@ def any(
expect_optional_token = partial(self.expect_optional_token, close_kind)
while not expect_optional_token():
append(parse_fn())
return nodes
return tuple(nodes)

def optional_many(
self, open_kind: TokenKind, parse_fn: Callable[[], T], close_kind: TokenKind
) -> list[T]:
) -> tuple[T, ...]:
"""Fetch matching nodes, maybe none.

Returns a list of parse nodes, determined by the ``parse_fn``. It can be empty
Returns a tuple of parse nodes, determined by the ``parse_fn``. It can be empty
only if the open token is missing, otherwise it will always return a non-empty
list that begins with a lex token of ``open_kind`` and ends with a lex token of
tuple that begins with a lex token of ``open_kind`` and ends with a lex token of
``close_kind``. Advances the parser to the next lex token after the closing
token.
"""
Expand All @@ -1206,16 +1207,16 @@ def optional_many(
expect_optional_token = partial(self.expect_optional_token, close_kind)
while not expect_optional_token():
append(parse_fn())
return nodes
return []
return tuple(nodes)
return ()

def many(
self, open_kind: TokenKind, parse_fn: Callable[[], T], close_kind: TokenKind
) -> list[T]:
) -> tuple[T, ...]:
"""Fetch matching nodes, at least one.

Returns a non-empty list of parse nodes, determined by the ``parse_fn``. This
list begins with a lex token of ``open_kind`` and ends with a lex token of
Returns a non-empty tuple of parse nodes, determined by the ``parse_fn``. This
tuple begins with a lex token of ``open_kind`` and ends with a lex token of
``close_kind``. Advances the parser to the next lex token after the closing
token.
"""
Expand All @@ -1225,17 +1226,17 @@ def many(
expect_optional_token = partial(self.expect_optional_token, close_kind)
while not expect_optional_token():
append(parse_fn())
return nodes
return tuple(nodes)

def delimited_many(
self, delimiter_kind: TokenKind, parse_fn: Callable[[], T]
) -> list[T]:
) -> tuple[T, ...]:
"""Fetch many delimited nodes.

Returns a non-empty list of parse nodes, determined by the ``parse_fn``. This
list may begin with a lex token of ``delimiter_kind`` followed by items
Returns a non-empty tuple of parse nodes, determined by the ``parse_fn``. This
tuple may begin with a lex token of ``delimiter_kind`` followed by items
separated by lex tokens of ``delimiter_kind``. Advances the parser to the next
lex token after the last item in the list.
lex token after the last item in the tuple.
"""
expect_optional_token = partial(self.expect_optional_token, delimiter_kind)
expect_optional_token()
Expand All @@ -1245,7 +1246,7 @@ def delimited_many(
append(parse_fn())
if not expect_optional_token():
break
return nodes
return tuple(nodes)

def advance_lexer(self) -> None:
"""Advance the lexer."""
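
Since ``any``, ``many``, ``optional_many`` and ``delimited_many`` now return tuples, the collections on parsed nodes are immutable. A minimal sketch of what that means for callers (query text is illustrative)::

    from graphql import parse

    doc = parse("{ me { id name } }")
    selections = doc.definitions[0].selection_set.selections
    assert isinstance(selections, tuple)
    # selections.append(...) would now raise AttributeError,
    # so accidental in-place mutation fails fast
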
8 changes: 6 additions & 2 deletions src/graphql/utilities/ast_to_dict.py
@@ -45,14 +45,18 @@ def ast_to_dict(
elif node in cache:
return cache[node]
cache[node] = res = {}
# Note: We don't use msgspec.structs.asdict() because loc needs special
# handling (converted to {start, end} dict rather than full Location object)
# Filter out 'loc' - it's handled separately for the locations option
fields = [f for f in node.keys if f != "loc"]
res.update(
{
key: ast_to_dict(getattr(node, key), locations, cache)
for key in ("kind", *node.keys[1:])
for key in ("kind", *fields)
}
)
if locations:
loc = node.loc
loc = getattr(node, "loc", None)
if loc:
res["loc"] = {"start": loc.start, "end": loc.end}
return res
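
For context, a small usage sketch of ``ast_to_dict`` (the query text is illustrative)::

    from graphql import parse
    from graphql.utilities import ast_to_dict

    doc = parse("{ hello }")
    as_dict = ast_to_dict(doc, locations=True)
    # Every node becomes a plain dict with "kind" first; "loc" is reduced to
    # {"start": ..., "end": ...} only when locations=True is passed.
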
5 changes: 2 additions & 3 deletions src/graphql/utilities/concat_ast.py
@@ -17,6 +17,5 @@ def concat_ast(asts: Collection[DocumentNode]) -> DocumentNode:
the ASTs together into batched AST, useful for validating many GraphQL source files
which together represent one conceptual application.
"""
return DocumentNode(
definitions=list(chain.from_iterable(document.definitions for document in asts))
)
all_definitions = chain.from_iterable(doc.definitions for doc in asts)
return DocumentNode(definitions=tuple(all_definitions))
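
A minimal usage sketch of ``concat_ast``, with two illustrative documents::

    from graphql import parse
    from graphql.utilities import concat_ast

    merged = concat_ast([parse("query A { a }"), parse("query B { b }")])
    # merged.definitions is a single tuple holding both operation definitions
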
4 changes: 2 additions & 2 deletions src/graphql/utilities/separate_operations.py
@@ -60,15 +60,15 @@ def separate_operations(document_ast: DocumentNode) -> dict[str, DocumentNode]:
# The list of definition nodes to be included for this operation, sorted
# to retain the same order as the original document.
separated_document_asts[operation_name] = DocumentNode(
definitions=[
definitions=tuple(
node
for node in document_ast.definitions
if node is operation
or (
isinstance(node, FragmentDefinitionNode)
and node.name.value in dependencies
)
]
)
)

return separated_document_asts
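
A minimal usage sketch of ``separate_operations`` (operation names are illustrative)::

    from graphql import parse
    from graphql.utilities import separate_operations

    document = parse("""
    query Q { a }
    mutation M { b }
    """)
    per_operation = separate_operations(document)
    # per_operation maps "Q" and "M" to standalone DocumentNode instances,
    # each including only the fragments that operation depends on
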
21 changes: 12 additions & 9 deletions src/graphql/utilities/sort_value_node.py
@@ -2,8 +2,6 @@

from __future__ import annotations

from copy import copy

from ..language import ListValueNode, ObjectFieldNode, ObjectValueNode, ValueNode
from ..pyutils import natural_comparison_key

Expand All @@ -18,18 +16,23 @@ def sort_value_node(value_node: ValueNode) -> ValueNode:
For internal use only.
"""
if isinstance(value_node, ObjectValueNode):
value_node = copy(value_node)
value_node.fields = sort_fields(value_node.fields)
# Create new node with updated fields (immutable-friendly copy-on-write)
values = {k: getattr(value_node, k) for k in value_node.keys}
values["fields"] = sort_fields(value_node.fields)
value_node = value_node.__class__(**values)
elif isinstance(value_node, ListValueNode):
value_node = copy(value_node)
value_node.values = tuple(sort_value_node(value) for value in value_node.values)
# Create new node with updated values (immutable-friendly copy-on-write)
values = {k: getattr(value_node, k) for k in value_node.keys}
values["values"] = tuple(sort_value_node(value) for value in value_node.values)
value_node = value_node.__class__(**values)
return value_node


def sort_field(field: ObjectFieldNode) -> ObjectFieldNode:
field = copy(field)
field.value = sort_value_node(field.value)
return field
# Create new node with updated value (immutable-friendly copy-on-write)
values = {k: getattr(field, k) for k in field.keys}
values["value"] = sort_value_node(field.value)
return field.__class__(**values)


def sort_fields(fields: tuple[ObjectFieldNode, ...]) -> tuple[ObjectFieldNode, ...]:
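
The rebuild pattern used above generalizes. A hypothetical helper sketch, assuming every node accepts all of its ``keys`` as keyword arguments::

    from graphql.language.ast import Node

    def replace_fields(node: Node, **updates) -> Node:
        """Return a copy of an immutable AST node with selected fields replaced."""
        values = {key: getattr(node, key) for key in node.keys}
        values.update(updates)
        return node.__class__(**values)
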